{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Otto 商品预测（基于 SVM）/ Otto product classification with SVM"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Import the required modules\n",
    "import pandas as pd \n",
    "import numpy as np\n",
    "\n",
    "from sklearn.metrics import accuracy_score\n",
    "from sklearn.metrics import classification_report\n",
    "from sklearn.metrics import confusion_matrix\n",
    "\n",
    "from matplotlib import pyplot as plt"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Load the training data. Pass the path directly to read_csv so pandas\n",
    "# opens and closes the file itself — the original open() call leaked the\n",
    "# file handle (it was never closed).\n",
    "# TODO(review): hardcoded absolute local path; make this configurable.\n",
    "train_path = \"C:/Users/chenxi/Desktop/learn/svm/svm作业/data/Otto_train.csv\"\n",
    "train = pd.read_csv(train_path)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "<bound method NDFrame.head of           id  feat_1  feat_2  feat_3  feat_4  feat_5  feat_6  feat_7  feat_8  \\\n",
       "0          1       1       0       0       0       0       0       0       0   \n",
       "1          2       0       0       0       0       0       0       0       1   \n",
       "2          3       0       0       0       0       0       0       0       1   \n",
       "3          4       1       0       0       1       6       1       5       0   \n",
       "4          5       0       0       0       0       0       0       0       0   \n",
       "5          6       2       1       0       0       7       0       0       0   \n",
       "6          7       2       0       0       0       0       0       0       2   \n",
       "7          8       0       0       0       0       0       0       0       0   \n",
       "8          9       0       0       0       0       0       0       0       4   \n",
       "9         10       0       0       0       0       0       0       1       0   \n",
       "10        11       0       1       1       2       0       0       2       1   \n",
       "11        12       0       1       2       1       0       0       0       0   \n",
       "12        13       1       0       1       0       0       0       0       1   \n",
       "13        14       0       0       0       1       0       0       0       2   \n",
       "14        15       0       0       0       0       0       0       0       0   \n",
       "15        16       0       0       0       2       0       0       0       1   \n",
       "16        17       0       0       0       0       0       0       0       0   \n",
       "17        18       0       0       0       0       0       0       0       0   \n",
       "18        19       0       0       0       0       0       0       0       1   \n",
       "19        20       0       0       0       0       0       0       0       0   \n",
       "20        21       0       0       2       0       0       0       0       0   \n",
       "21        22       0       0       0       0       0       0       0       0   \n",
       "22        23       0       0       0       0       0       0       0       4   \n",
       "23        24       0       0       0       0       1       0       0       1   \n",
       "24        25       0       0       0       0       0       0       0       1   \n",
       "25        26       0       0       0       0       0       0       0       3   \n",
       "26        27       2       0       0       0       0       0       0       4   \n",
       "27        28       0       0       0       0       0       0       0       1   \n",
       "28        29       0       0       0       0       3       0       0       0   \n",
       "29        30       2       0       0       0       0       0       4       1   \n",
       "...      ...     ...     ...     ...     ...     ...     ...     ...     ...   \n",
       "61848  61849       0       0       0       0       0       0       0       2   \n",
       "61849  61850       5       0       1       1       0       0       1       1   \n",
       "61850  61851       0       0       1       0       0       0       0       6   \n",
       "61851  61852       3       0       0       0       0       0       0       0   \n",
       "61852  61853       0       0       0       0       0       0       0       6   \n",
       "61853  61854       0       0       0       0       0       0       0       0   \n",
       "61854  61855       0       0       1       0       0       0       0       0   \n",
       "61855  61856       0       0       0       0       0       0       0       0   \n",
       "61856  61857       0       0       1       0       0       0       1       3   \n",
       "61857  61858       0       0       0       0       0       0       0      12   \n",
       "61858  61859       4       1       0       0       0       0       0      14   \n",
       "61859  61860       0       0       0       0       1       0       0       2   \n",
       "61860  61861       0       0       0       0       0       0       0       3   \n",
       "61861  61862       0       0       0       0       0       0       0      12   \n",
       "61862  61863       2       0       0       0       0       0       0       1   \n",
       "61863  61864       0       0       0       0       1       0       0       4   \n",
       "61864  61865       0       0       0       0       0       0       0       1   \n",
       "61865  61866       0       0       0       0       0       0       0       2   \n",
       "61866  61867       0       0       0       0       0       0       0      15   \n",
       "61867  61868       0       0       0       0       0       0       0       0   \n",
       "61868  61869       0       0       0       0       0       0       0       2   \n",
       "61869  61870       0       0       0       0       0       0       0       2   \n",
       "61870  61871       1       0       1       0       1       0       0       0   \n",
       "61871  61872       0       0       0       0       0       0       1       1   \n",
       "61872  61873       0       0       0       0       0       0       0       0   \n",
       "61873  61874       1       0       0       1       1       0       0       0   \n",
       "61874  61875       4       0       0       0       0       0       0       0   \n",
       "61875  61876       0       0       0       0       0       0       0       3   \n",
       "61876  61877       1       0       0       0       0       0       0       0   \n",
       "61877  61878       0       0       0       0       0       0       0       0   \n",
       "\n",
       "       feat_9   ...     feat_85  feat_86  feat_87  feat_88  feat_89  feat_90  \\\n",
       "0           0   ...           1        0        0        0        0        0   \n",
       "1           0   ...           0        0        0        0        0        0   \n",
       "2           0   ...           0        0        0        0        0        0   \n",
       "3           0   ...           0        1        2        0        0        0   \n",
       "4           0   ...           1        0        0        0        0        1   \n",
       "5           0   ...           0        3        0        0        0        0   \n",
       "6           0   ...           1        1        0        0        0        0   \n",
       "7           0   ...           0        0        1        0        0        0   \n",
       "8           0   ...           0        2        0        0        0        0   \n",
       "9           0   ...           0        0        1        0        0        0   \n",
       "10          0   ...           0        0        1        0        1        0   \n",
       "11          0   ...           0        2        0        0        0        0   \n",
       "12          0   ...           0        2        0        5        0        0   \n",
       "13          0   ...           2        0        0        1        0        0   \n",
       "14          0   ...           0        0        2        0        0        0   \n",
       "15          0   ...           1        0        1        0        2        0   \n",
       "16          0   ...           0        2        0        0        0        0   \n",
       "17          0   ...           0        0        0        0        0        0   \n",
       "18          0   ...           0        0        3        1        0        0   \n",
       "19          0   ...           0        0        3        0        0        0   \n",
       "20          0   ...           0        0        2        0        0        0   \n",
       "21          0   ...           0        0        0        1        0        0   \n",
       "22          0   ...           0        0        0        0        0        0   \n",
       "23          1   ...           0        0        1        0        0        0   \n",
       "24          0   ...           0        0        0        0        0        0   \n",
       "25          0   ...           0        0        0        1        0        0   \n",
       "26          0   ...           0        0        0        0        1        0   \n",
       "27          0   ...           0        0        0        0        0        0   \n",
       "28          0   ...           0        2        0        0        0        0   \n",
       "29          0   ...           0        0        3        2        0        0   \n",
       "...       ...   ...         ...      ...      ...      ...      ...      ...   \n",
       "61848       0   ...           0        0        0        0        0        0   \n",
       "61849       0   ...           0        2        0        0        0        0   \n",
       "61850       0   ...           0        0        0        0        0        0   \n",
       "61851       0   ...           0        1        0        1        0        0   \n",
       "61852       0   ...           0        0        0        0        3        0   \n",
       "61853       0   ...           0        1        1        0        0        0   \n",
       "61854       0   ...           0        0        0        0        0        2   \n",
       "61855       0   ...           0        0        0        0        0        0   \n",
       "61856       0   ...           1        7        3        0        1        0   \n",
       "61857       0   ...           0        0        0        1        1        0   \n",
       "61858       0   ...           2        0        2        1        0        0   \n",
       "61859       0   ...           0        1        1        0        2        0   \n",
       "61860       0   ...           0        0        0        0        1        0   \n",
       "61861       0   ...           0        0        0        0        0        0   \n",
       "61862       3   ...           0        5        2        0        0        0   \n",
       "61863       0   ...           0        0        0        1        0        0   \n",
       "61864       0   ...           0        8        1        0        0        0   \n",
       "61865       0   ...           0        0        1        0        0        0   \n",
       "61866       3   ...           0        0        0        0        3        0   \n",
       "61867       0   ...           0        0        2        0        0        0   \n",
       "61868       0   ...           0        2        0        0        2        0   \n",
       "61869       0   ...           0        3        2        0        0        0   \n",
       "61870       0   ...           1        0        0        0        0        0   \n",
       "61871       0   ...           0        0        0        0        1        0   \n",
       "61872       0   ...           0        1        0        0        0        0   \n",
       "61873       0   ...           1        0        0        0        0        0   \n",
       "61874       0   ...           0        2        0        0        2        0   \n",
       "61875       1   ...           0        3        1        0        0        0   \n",
       "61876       0   ...           0        0        0        0        1        0   \n",
       "61877       0   ...           0        0        0        0        0        0   \n",
       "\n",
       "       feat_91  feat_92  feat_93   target  \n",
       "0            0        0        0  Class_1  \n",
       "1            0        0        0  Class_1  \n",
       "2            0        0        0  Class_1  \n",
       "3            0        0        0  Class_1  \n",
       "4            0        0        0  Class_1  \n",
       "5            2        0        0  Class_1  \n",
       "6            0        0        1  Class_1  \n",
       "7            0        0        0  Class_1  \n",
       "8            0        0        1  Class_1  \n",
       "9            1        0        0  Class_1  \n",
       "10           0        1        0  Class_1  \n",
       "11           0        1        0  Class_1  \n",
       "12           0        0        0  Class_1  \n",
       "13           0        0        0  Class_1  \n",
       "14           0        0        0  Class_1  \n",
       "15           0        0        0  Class_1  \n",
       "16           0        0        0  Class_1  \n",
       "17           0        0        0  Class_1  \n",
       "18           1        0        1  Class_1  \n",
       "19           0        0        0  Class_1  \n",
       "20           0        0        0  Class_1  \n",
       "21           2        0        0  Class_1  \n",
       "22           0        0        0  Class_1  \n",
       "23           0        0        0  Class_1  \n",
       "24           0        0        0  Class_1  \n",
       "25           0        0        0  Class_1  \n",
       "26           1        0        0  Class_1  \n",
       "27           1        1        0  Class_1  \n",
       "28           0        1        0  Class_1  \n",
       "29           0        0        1  Class_1  \n",
       "...        ...      ...      ...      ...  \n",
       "61848        4        0        0  Class_9  \n",
       "61849        0        5        0  Class_9  \n",
       "61850        0        0        0  Class_9  \n",
       "61851        0        7        0  Class_9  \n",
       "61852        0        0        0  Class_9  \n",
       "61853        0        0        0  Class_9  \n",
       "61854        0        0        0  Class_9  \n",
       "61855        0        0        0  Class_9  \n",
       "61856        0        1        0  Class_9  \n",
       "61857        1        0        0  Class_9  \n",
       "61858        1        0        0  Class_9  \n",
       "61859        0        3        0  Class_9  \n",
       "61860        0        0        0  Class_9  \n",
       "61861        0        0        0  Class_9  \n",
       "61862        0        0        0  Class_9  \n",
       "61863        0        1        0  Class_9  \n",
       "61864        0        0        0  Class_9  \n",
       "61865        0        0        0  Class_9  \n",
       "61866        0        0        0  Class_9  \n",
       "61867        0        0        0  Class_9  \n",
       "61868        0        1        0  Class_9  \n",
       "61869        0        1        0  Class_9  \n",
       "61870        0        2        0  Class_9  \n",
       "61871        0        0        0  Class_9  \n",
       "61872        0        0        0  Class_9  \n",
       "61873        0        2        0  Class_9  \n",
       "61874        0        1        0  Class_9  \n",
       "61875        0        0        0  Class_9  \n",
       "61876        3       10        0  Class_9  \n",
       "61877        0        2        0  Class_9  \n",
       "\n",
       "[61878 rows x 95 columns]>"
      ]
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Preview the first rows. Note head() must be *called*: the bare attribute\n",
    "# `train.head` is a bound method whose repr dumps the entire frame\n",
    "# (all 61878 rows), as the previously stored output shows.\n",
    "train.head()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "<class 'pandas.core.frame.DataFrame'>\n",
      "RangeIndex: 61878 entries, 0 to 61877\n",
      "Data columns (total 95 columns):\n",
      "id         61878 non-null int64\n",
      "feat_1     61878 non-null int64\n",
      "feat_2     61878 non-null int64\n",
      "feat_3     61878 non-null int64\n",
      "feat_4     61878 non-null int64\n",
      "feat_5     61878 non-null int64\n",
      "feat_6     61878 non-null int64\n",
      "feat_7     61878 non-null int64\n",
      "feat_8     61878 non-null int64\n",
      "feat_9     61878 non-null int64\n",
      "feat_10    61878 non-null int64\n",
      "feat_11    61878 non-null int64\n",
      "feat_12    61878 non-null int64\n",
      "feat_13    61878 non-null int64\n",
      "feat_14    61878 non-null int64\n",
      "feat_15    61878 non-null int64\n",
      "feat_16    61878 non-null int64\n",
      "feat_17    61878 non-null int64\n",
      "feat_18    61878 non-null int64\n",
      "feat_19    61878 non-null int64\n",
      "feat_20    61878 non-null int64\n",
      "feat_21    61878 non-null int64\n",
      "feat_22    61878 non-null int64\n",
      "feat_23    61878 non-null int64\n",
      "feat_24    61878 non-null int64\n",
      "feat_25    61878 non-null int64\n",
      "feat_26    61878 non-null int64\n",
      "feat_27    61878 non-null int64\n",
      "feat_28    61878 non-null int64\n",
      "feat_29    61878 non-null int64\n",
      "feat_30    61878 non-null int64\n",
      "feat_31    61878 non-null int64\n",
      "feat_32    61878 non-null int64\n",
      "feat_33    61878 non-null int64\n",
      "feat_34    61878 non-null int64\n",
      "feat_35    61878 non-null int64\n",
      "feat_36    61878 non-null int64\n",
      "feat_37    61878 non-null int64\n",
      "feat_38    61878 non-null int64\n",
      "feat_39    61878 non-null int64\n",
      "feat_40    61878 non-null int64\n",
      "feat_41    61878 non-null int64\n",
      "feat_42    61878 non-null int64\n",
      "feat_43    61878 non-null int64\n",
      "feat_44    61878 non-null int64\n",
      "feat_45    61878 non-null int64\n",
      "feat_46    61878 non-null int64\n",
      "feat_47    61878 non-null int64\n",
      "feat_48    61878 non-null int64\n",
      "feat_49    61878 non-null int64\n",
      "feat_50    61878 non-null int64\n",
      "feat_51    61878 non-null int64\n",
      "feat_52    61878 non-null int64\n",
      "feat_53    61878 non-null int64\n",
      "feat_54    61878 non-null int64\n",
      "feat_55    61878 non-null int64\n",
      "feat_56    61878 non-null int64\n",
      "feat_57    61878 non-null int64\n",
      "feat_58    61878 non-null int64\n",
      "feat_59    61878 non-null int64\n",
      "feat_60    61878 non-null int64\n",
      "feat_61    61878 non-null int64\n",
      "feat_62    61878 non-null int64\n",
      "feat_63    61878 non-null int64\n",
      "feat_64    61878 non-null int64\n",
      "feat_65    61878 non-null int64\n",
      "feat_66    61878 non-null int64\n",
      "feat_67    61878 non-null int64\n",
      "feat_68    61878 non-null int64\n",
      "feat_69    61878 non-null int64\n",
      "feat_70    61878 non-null int64\n",
      "feat_71    61878 non-null int64\n",
      "feat_72    61878 non-null int64\n",
      "feat_73    61878 non-null int64\n",
      "feat_74    61878 non-null int64\n",
      "feat_75    61878 non-null int64\n",
      "feat_76    61878 non-null int64\n",
      "feat_77    61878 non-null int64\n",
      "feat_78    61878 non-null int64\n",
      "feat_79    61878 non-null int64\n",
      "feat_80    61878 non-null int64\n",
      "feat_81    61878 non-null int64\n",
      "feat_82    61878 non-null int64\n",
      "feat_83    61878 non-null int64\n",
      "feat_84    61878 non-null int64\n",
      "feat_85    61878 non-null int64\n",
      "feat_86    61878 non-null int64\n",
      "feat_87    61878 non-null int64\n",
      "feat_88    61878 non-null int64\n",
      "feat_89    61878 non-null int64\n",
      "feat_90    61878 non-null int64\n",
      "feat_91    61878 non-null int64\n",
      "feat_92    61878 non-null int64\n",
      "feat_93    61878 non-null int64\n",
      "target     61878 non-null object\n",
      "dtypes: int64(94), object(1)\n",
      "memory usage: 44.8+ MB\n"
     ]
    }
   ],
   "source": [
    "# Schema overview: 61878 rows, 93 integer features plus id and a string\n",
    "# target column; per the output, every column is fully non-null.\n",
    "train.info()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>id</th>\n",
       "      <th>feat_1</th>\n",
       "      <th>feat_2</th>\n",
       "      <th>feat_3</th>\n",
       "      <th>feat_4</th>\n",
       "      <th>feat_5</th>\n",
       "      <th>feat_6</th>\n",
       "      <th>feat_7</th>\n",
       "      <th>feat_8</th>\n",
       "      <th>feat_9</th>\n",
       "      <th>...</th>\n",
       "      <th>feat_84</th>\n",
       "      <th>feat_85</th>\n",
       "      <th>feat_86</th>\n",
       "      <th>feat_87</th>\n",
       "      <th>feat_88</th>\n",
       "      <th>feat_89</th>\n",
       "      <th>feat_90</th>\n",
       "      <th>feat_91</th>\n",
       "      <th>feat_92</th>\n",
       "      <th>feat_93</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>count</th>\n",
       "      <td>61878.000000</td>\n",
       "      <td>61878.00000</td>\n",
       "      <td>61878.000000</td>\n",
       "      <td>61878.000000</td>\n",
       "      <td>61878.000000</td>\n",
       "      <td>61878.000000</td>\n",
       "      <td>61878.000000</td>\n",
       "      <td>61878.000000</td>\n",
       "      <td>61878.000000</td>\n",
       "      <td>61878.000000</td>\n",
       "      <td>...</td>\n",
       "      <td>61878.000000</td>\n",
       "      <td>61878.000000</td>\n",
       "      <td>61878.000000</td>\n",
       "      <td>61878.000000</td>\n",
       "      <td>61878.000000</td>\n",
       "      <td>61878.000000</td>\n",
       "      <td>61878.000000</td>\n",
       "      <td>61878.000000</td>\n",
       "      <td>61878.000000</td>\n",
       "      <td>61878.000000</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>mean</th>\n",
       "      <td>30939.500000</td>\n",
       "      <td>0.38668</td>\n",
       "      <td>0.263066</td>\n",
       "      <td>0.901467</td>\n",
       "      <td>0.779081</td>\n",
       "      <td>0.071043</td>\n",
       "      <td>0.025696</td>\n",
       "      <td>0.193704</td>\n",
       "      <td>0.662433</td>\n",
       "      <td>1.011296</td>\n",
       "      <td>...</td>\n",
       "      <td>0.070752</td>\n",
       "      <td>0.532306</td>\n",
       "      <td>1.128576</td>\n",
       "      <td>0.393549</td>\n",
       "      <td>0.874915</td>\n",
       "      <td>0.457772</td>\n",
       "      <td>0.812421</td>\n",
       "      <td>0.264941</td>\n",
       "      <td>0.380119</td>\n",
       "      <td>0.126135</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>std</th>\n",
       "      <td>17862.784315</td>\n",
       "      <td>1.52533</td>\n",
       "      <td>1.252073</td>\n",
       "      <td>2.934818</td>\n",
       "      <td>2.788005</td>\n",
       "      <td>0.438902</td>\n",
       "      <td>0.215333</td>\n",
       "      <td>1.030102</td>\n",
       "      <td>2.255770</td>\n",
       "      <td>3.474822</td>\n",
       "      <td>...</td>\n",
       "      <td>1.151460</td>\n",
       "      <td>1.900438</td>\n",
       "      <td>2.681554</td>\n",
       "      <td>1.575455</td>\n",
       "      <td>2.115466</td>\n",
       "      <td>1.527385</td>\n",
       "      <td>4.597804</td>\n",
       "      <td>2.045646</td>\n",
       "      <td>0.982385</td>\n",
       "      <td>1.201720</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>min</th>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>...</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>25%</th>\n",
       "      <td>15470.250000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>...</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>50%</th>\n",
       "      <td>30939.500000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>...</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>75%</th>\n",
       "      <td>46408.750000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>...</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>max</th>\n",
       "      <td>61878.000000</td>\n",
       "      <td>61.00000</td>\n",
       "      <td>51.000000</td>\n",
       "      <td>64.000000</td>\n",
       "      <td>70.000000</td>\n",
       "      <td>19.000000</td>\n",
       "      <td>10.000000</td>\n",
       "      <td>38.000000</td>\n",
       "      <td>76.000000</td>\n",
       "      <td>43.000000</td>\n",
       "      <td>...</td>\n",
       "      <td>76.000000</td>\n",
       "      <td>55.000000</td>\n",
       "      <td>65.000000</td>\n",
       "      <td>67.000000</td>\n",
       "      <td>30.000000</td>\n",
       "      <td>61.000000</td>\n",
       "      <td>130.000000</td>\n",
       "      <td>52.000000</td>\n",
       "      <td>19.000000</td>\n",
       "      <td>87.000000</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>8 rows × 94 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "                 id       feat_1        feat_2        feat_3        feat_4  \\\n",
       "count  61878.000000  61878.00000  61878.000000  61878.000000  61878.000000   \n",
       "mean   30939.500000      0.38668      0.263066      0.901467      0.779081   \n",
       "std    17862.784315      1.52533      1.252073      2.934818      2.788005   \n",
       "min        1.000000      0.00000      0.000000      0.000000      0.000000   \n",
       "25%    15470.250000      0.00000      0.000000      0.000000      0.000000   \n",
       "50%    30939.500000      0.00000      0.000000      0.000000      0.000000   \n",
       "75%    46408.750000      0.00000      0.000000      0.000000      0.000000   \n",
       "max    61878.000000     61.00000     51.000000     64.000000     70.000000   \n",
       "\n",
       "             feat_5        feat_6        feat_7        feat_8        feat_9  \\\n",
       "count  61878.000000  61878.000000  61878.000000  61878.000000  61878.000000   \n",
       "mean       0.071043      0.025696      0.193704      0.662433      1.011296   \n",
       "std        0.438902      0.215333      1.030102      2.255770      3.474822   \n",
       "min        0.000000      0.000000      0.000000      0.000000      0.000000   \n",
       "25%        0.000000      0.000000      0.000000      0.000000      0.000000   \n",
       "50%        0.000000      0.000000      0.000000      0.000000      0.000000   \n",
       "75%        0.000000      0.000000      0.000000      1.000000      0.000000   \n",
       "max       19.000000     10.000000     38.000000     76.000000     43.000000   \n",
       "\n",
       "           ...            feat_84       feat_85       feat_86       feat_87  \\\n",
       "count      ...       61878.000000  61878.000000  61878.000000  61878.000000   \n",
       "mean       ...           0.070752      0.532306      1.128576      0.393549   \n",
       "std        ...           1.151460      1.900438      2.681554      1.575455   \n",
       "min        ...           0.000000      0.000000      0.000000      0.000000   \n",
       "25%        ...           0.000000      0.000000      0.000000      0.000000   \n",
       "50%        ...           0.000000      0.000000      0.000000      0.000000   \n",
       "75%        ...           0.000000      0.000000      1.000000      0.000000   \n",
       "max        ...          76.000000     55.000000     65.000000     67.000000   \n",
       "\n",
       "            feat_88       feat_89       feat_90       feat_91       feat_92  \\\n",
       "count  61878.000000  61878.000000  61878.000000  61878.000000  61878.000000   \n",
       "mean       0.874915      0.457772      0.812421      0.264941      0.380119   \n",
       "std        2.115466      1.527385      4.597804      2.045646      0.982385   \n",
       "min        0.000000      0.000000      0.000000      0.000000      0.000000   \n",
       "25%        0.000000      0.000000      0.000000      0.000000      0.000000   \n",
       "50%        0.000000      0.000000      0.000000      0.000000      0.000000   \n",
       "75%        1.000000      0.000000      0.000000      0.000000      0.000000   \n",
       "max       30.000000     61.000000    130.000000     52.000000     19.000000   \n",
       "\n",
       "            feat_93  \n",
       "count  61878.000000  \n",
       "mean       0.126135  \n",
       "std        1.201720  \n",
       "min        0.000000  \n",
       "25%        0.000000  \n",
       "50%        0.000000  \n",
       "75%        0.000000  \n",
       "max       87.000000  \n",
       "\n",
       "[8 rows x 94 columns]"
      ]
     },
     "execution_count": 9,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# 各属性的统计特性\n",
    "train.describe()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 分割数据"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\model_selection\\_split.py:2026: FutureWarning: From version 0.21, test_size will always complement train_size unless both are specified.\n",
      "  FutureWarning)\n"
     ]
    },
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>feat_1</th>\n",
       "      <th>feat_2</th>\n",
       "      <th>feat_3</th>\n",
       "      <th>feat_4</th>\n",
       "      <th>feat_5</th>\n",
       "      <th>feat_6</th>\n",
       "      <th>feat_7</th>\n",
       "      <th>feat_8</th>\n",
       "      <th>feat_9</th>\n",
       "      <th>feat_10</th>\n",
       "      <th>...</th>\n",
       "      <th>feat_84</th>\n",
       "      <th>feat_85</th>\n",
       "      <th>feat_86</th>\n",
       "      <th>feat_87</th>\n",
       "      <th>feat_88</th>\n",
       "      <th>feat_89</th>\n",
       "      <th>feat_90</th>\n",
       "      <th>feat_91</th>\n",
       "      <th>feat_92</th>\n",
       "      <th>feat_93</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>39091</th>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>6835</th>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>4</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>10790</th>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>14</td>\n",
       "      <td>0</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>2</td>\n",
       "      <td>2</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>56394</th>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>2</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>2</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>37348</th>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>5 rows × 93 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "       feat_1  feat_2  feat_3  feat_4  feat_5  feat_6  feat_7  feat_8  feat_9  \\\n",
       "39091       0       0       1       1       0       0       0       0       0   \n",
       "6835        0       0       0       0       0       0       0       0       0   \n",
       "10790       0       0       0       0       0       0       0       0      14   \n",
       "56394       0       0       0       0       0       0       0       2       0   \n",
       "37348       0       0       0       0       0       0       0       0       0   \n",
       "\n",
       "       feat_10   ...     feat_84  feat_85  feat_86  feat_87  feat_88  feat_89  \\\n",
       "39091        0   ...           0        0        0        0        0        0   \n",
       "6835         0   ...           0        0        1        0        4        0   \n",
       "10790        0   ...           0        1        0        2        2        0   \n",
       "56394        0   ...           0        0        0        0        0        2   \n",
       "37348        0   ...           0        0        0        0        0        0   \n",
       "\n",
       "       feat_90  feat_91  feat_92  feat_93  \n",
       "39091        0        0        1        0  \n",
       "6835         0        0        0        0  \n",
       "10790        0        0        0        0  \n",
       "56394        0        0        0        0  \n",
       "37348        0        0        1        0  \n",
       "\n",
       "[5 rows x 93 columns]"
      ]
     },
     "execution_count": 10,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# 采用train_test_split，从数据集中随机抽取10000条记录，用来训练模型。\n",
    "from scipy.sparse import csr_matrix\n",
    "from sklearn.model_selection import train_test_split\n",
    "\n",
    "y = train['target']\n",
    "X = train.drop([\"id\", \"target\"], axis=1)\n",
    "\n",
    "# 保存特征名字\n",
    "feat_names = X.columns \n",
    "\n",
    "# 从数据集中随机分割出10,000的样本作为训练集\n",
    "X_train, X_drop, y_train, y_drop = train_test_split(X, y, train_size=10000, random_state=10)\n",
    "X_train.head()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [],
   "source": [
    "# 把训练集变为稀疏矩阵，加快模型训练速度\n",
    "X_train = csr_matrix(X_train)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "训练模型，默认的SVM"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.7543\n",
      "{'C': 0.1}\n"
     ]
    }
   ],
   "source": [
    "from sklearn.svm import LinearSVC\n",
    "from sklearn.model_selection import GridSearchCV\n",
    "\n",
    "Cs = np.logspace(-3, 3, 7)\n",
    "param_grid = {'C': Cs}\n",
    "\n",
    "svc_lr = LinearSVC(penalty='l2', loss='squared_hinge', max_iter=10000)\n",
    "grid = GridSearchCV( svc_lr, param_grid, cv=5)\n",
    "\n",
    "grid.fit(X_train, y_train)\n",
    "\n",
    "print(grid.best_score_)\n",
    "print(grid.best_params_)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\utils\\deprecation.py:122: FutureWarning: You are accessing a training score ('split0_train_score'), which will not be available by default any more in 0.21. If you need training scores, please set return_train_score=True\n",
      "  warnings.warn(*warn_args, **warn_kwargs)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\utils\\deprecation.py:122: FutureWarning: You are accessing a training score ('split1_train_score'), which will not be available by default any more in 0.21. If you need training scores, please set return_train_score=True\n",
      "  warnings.warn(*warn_args, **warn_kwargs)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\utils\\deprecation.py:122: FutureWarning: You are accessing a training score ('split2_train_score'), which will not be available by default any more in 0.21. If you need training scores, please set return_train_score=True\n",
      "  warnings.warn(*warn_args, **warn_kwargs)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\utils\\deprecation.py:122: FutureWarning: You are accessing a training score ('split3_train_score'), which will not be available by default any more in 0.21. If you need training scores, please set return_train_score=True\n",
      "  warnings.warn(*warn_args, **warn_kwargs)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\utils\\deprecation.py:122: FutureWarning: You are accessing a training score ('split4_train_score'), which will not be available by default any more in 0.21. If you need training scores, please set return_train_score=True\n",
      "  warnings.warn(*warn_args, **warn_kwargs)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\utils\\deprecation.py:122: FutureWarning: You are accessing a training score ('mean_train_score'), which will not be available by default any more in 0.21. If you need training scores, please set return_train_score=True\n",
      "  warnings.warn(*warn_args, **warn_kwargs)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\utils\\deprecation.py:122: FutureWarning: You are accessing a training score ('std_train_score'), which will not be available by default any more in 0.21. If you need training scores, please set return_train_score=True\n",
      "  warnings.warn(*warn_args, **warn_kwargs)\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "{'mean_fit_time': array([ 0.47011781,  3.95287538, 19.36483998, 22.36335654, 29.24645805,\n",
       "        34.2452024 , 36.38141069]),\n",
       " 'std_fit_time': array([0.13569801, 0.79970074, 0.37763906, 0.4937159 , 0.6146245 ,\n",
       "        0.83394787, 0.60511053]),\n",
       " 'mean_score_time': array([0.00339007, 0.00220027, 0.00259638, 0.00220585, 0.00220022,\n",
       "        0.00217695, 0.00260472]),\n",
       " 'std_score_time': array([0.00184945, 0.0003952 , 0.00080938, 0.00038799, 0.00039631,\n",
       "        0.00040792, 0.00049874]),\n",
       " 'param_C': masked_array(data=[0.001, 0.01, 0.1, 1.0, 10.0, 100.0, 1000.0],\n",
       "              mask=[False, False, False, False, False, False, False],\n",
       "        fill_value='?',\n",
       "             dtype=object),\n",
       " 'params': [{'C': 0.001},\n",
       "  {'C': 0.01},\n",
       "  {'C': 0.1},\n",
       "  {'C': 1.0},\n",
       "  {'C': 10.0},\n",
       "  {'C': 100.0},\n",
       "  {'C': 1000.0}],\n",
       " 'split0_test_score': array([0.76259352, 0.76708229, 0.76408978, 0.76309227, 0.76309227,\n",
       "        0.7436409 , 0.68029925]),\n",
       " 'split1_test_score': array([0.74800399, 0.74850299, 0.75199601, 0.750499  , 0.74500998,\n",
       "        0.66516966, 0.59580838]),\n",
       " 'split2_test_score': array([0.75087544, 0.75187594, 0.75287644, 0.75137569, 0.72936468,\n",
       "        0.66183092, 0.67533767]),\n",
       " 'split3_test_score': array([0.73947896, 0.74048096, 0.74148297, 0.74148297, 0.73997996,\n",
       "        0.70240481, 0.63677355]),\n",
       " 'split4_test_score': array([0.75751503, 0.75701403, 0.76102204, 0.75851703, 0.75400802,\n",
       "        0.69188377, 0.64178357]),\n",
       " 'mean_test_score': array([0.7517, 0.753 , 0.7543, 0.753 , 0.7463, 0.693 , 0.646 ]),\n",
       " 'std_test_score': array([0.00795154, 0.0088637 , 0.00790528, 0.00739955, 0.01157732,\n",
       "        0.02969017, 0.03055024]),\n",
       " 'rank_test_score': array([4, 2, 1, 2, 5, 6, 7]),\n",
       " 'split0_train_score': array([0.75834897, 0.76422764, 0.76597874, 0.76722952, 0.76772983,\n",
       "        0.73683552, 0.68642902]),\n",
       " 'split1_train_score': array([0.76138069, 0.76850925, 0.77101051, 0.77126063, 0.76675838,\n",
       "        0.69797399, 0.62156078]),\n",
       " 'split2_train_score': array([0.76540432, 0.7704037 , 0.7727784 , 0.77290339, 0.7624047 ,\n",
       "        0.68141482, 0.68591426]),\n",
       " 'split3_train_score': array([0.763993  , 0.77073963, 0.77411294, 0.77286357, 0.77061469,\n",
       "        0.73638181, 0.64855072]),\n",
       " 'split4_train_score': array([0.76174413, 0.7669915 , 0.76786607, 0.76874063, 0.76874063,\n",
       "        0.71289355, 0.66054473]),\n",
       " 'mean_train_score': array([0.76217422, 0.76817435, 0.77034933, 0.77059955, 0.76724965,\n",
       "        0.71309994, 0.6605999 ]),\n",
       " 'std_train_score': array([0.00241614, 0.00239326, 0.00302559, 0.00226641, 0.00273832,\n",
       "        0.02162502, 0.02440163])}"
      ]
     },
     "execution_count": 14,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "grid.cv_results_"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\utils\\deprecation.py:122: FutureWarning: You are accessing a training score ('mean_train_score'), which will not be available by default any more in 0.21. If you need training scores, please set return_train_score=True\n",
      "  warnings.warn(*warn_args, **warn_kwargs)\n"
     ]
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYsAAAEKCAYAAADjDHn2AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAIABJREFUeJzt3Xl81NXVx/HPIez7qig7CFTER1TEWrSKtQqtdWmtSrVVXBB3VFTUKu7FFRWwilvlaQUXLPq4FBVbta0KKAgCoogiEUSMoAICCTnPH3dihpCQycxvMjPJ9/16zYvMzG/unAHyO3Pv/d1zzd0RERHZkTqZDkBERLKfkoWIiFRKyUJERCqlZCEiIpVSshARkUopWYiISKWULEREpFJKFiIiUiklCxERqVTdTAcQlbZt23rXrl0zHYaISE555513vnL3dpUdV2OSRdeuXZkzZ06mwxARySlmtjyR4zQMJSIilVKyEBGRSilZiIhIpWrMnIWISFUUFhaSn5/Ppk2bMh1KtWjYsCEdO3akXr16Sb1eyUJEaqX8/HyaNWtG165dMbNMh5NW7k5BQQH5+fl069YtqTY0DCUitdKmTZto06ZNjU8UAGZGmzZtUupFKVmISK1VGxJFiVQ/q4ahJPts3QrffAMbNkBxcTS3rVujayuZW4sW0KFD6W3XXaFhw0z/TYskTMlC0sMd1q+Hr7+GtWvDreTnsn+WfeybbzIdffVo0yYkjfgkEp9MOnSAtm2hjgYAaqKCggJ+9rOfAfDFF1+Ql5dHu3ZhIfWsWbOoX79+pW0MGzaM0aNH07t377TGCkoWUplNmxI7wZd9bu1aKCqquN169aBVq3Br3Rrat4c+fbZ9rEkTyMsLJ8uob+lqt+ytpOv/zTfw+efb31auDH/OmwerV4ckW/bvKT6hlJdcdt0VGjdO3/8BSYs2bdowb948AK699lqaNm3KqFGjtjnG3XF36lTwheGRRx5Je5wllCxqg6IiWLeuat/uS/7c0YSYGbRsWXpyb9UKunbd9oQf/2f8z02alJ5Ia4OWLcNtjz0qPqawEL74YtskEn977z144YUwPFde+zvqoXToADvtpF5KDli6dCnHHHMMBx54IG+//TbPPfcc1113He+++y7ff/89J5xwAtdccw0ABx54IBMmTKBv3760bduWESNG8OKLL9K4cWOeeeYZdtppp8jiUrLIde7w6afhm+m8ebB4MRQUbHvS//bbHbfRtOm2J/JevXZ8oi/5s0ULnXyiVK8edOoUbjvy7bcV91A+/xzefz8kneLibV9Xty7sssuOeygdOoT/D7XNyJHh9ydK/frBXXcl9dJFixbxyCOPcN999wEwduxYWrduTVFREYMGDeK4446jT58+27zmm2++4eCDD2bs2LFcfPHFPPzww4wePTrlj1FCySKXbN4MixaVJoaSW0kyqFMHevQI3yA7dIA996z8W37LlpDA2KhkkebNw2333Ss+pqgoDGuV10P5/PPwpeKVV8r/ItG8ecW9kx49wv8rSasePXqw3377/XB/ypQpPPTQQxQVFbFy5UoWLVq0XbJo1KgRQ4YMAWDffffljTfeiDQmJYts9fXXYdghPiksWlQ6D9CkCey1F5x8cvgG068f9O0LjRplNm7JDnXrlp7g404621m/vuIeyuefw8yZsGpVuJqsxH33wVlnpf8zVKckewDp0qRJkx9+/uijj7j77ruZNWsWLVu25OSTTy53vUT8hHheXh5FO5ozTIKSRaaVHUYquX32Wekxu+4aksGRR5Ymhh49NAQkqWvaFHr3DreKbN0Ka9aE5HHJJXDFFfCb34QrtSTtvv32W5o1a0bz5s1ZtWoVM2bMYPDgwdUeh5JFdSpvGOm990ovFa1TJ/zSDhwI555bmhginKQSqbK8vHC1Wvv2MHFi6NH+8Y+hhyFpt88++9CnTx/69u1L9+7dGThw
YEbiMC97qV6O6t+/v2fV5kdr15Y/jFRYGJ5v3Dj80pUkhJJhJF0CKdnuoovg7rthzhzYZ59MR5O0xYsXs/uO5n1qoPI+s5m94+79K3utehapcofly7cfRloet/nULruEZPCLX2w7jJSXl7m4RZJ17bXw2GNw3nnw739rOLSWULKoii1byr8aqeww0k9+AuecE5LCXnvBzjtnNm6RKLVoAWPHwmmnwV//Cn/4Q6YjkmqgZFGRRIeRfvc7DSNJ7XPKKXD//XDZZXDMMeFyW6nRlCwSGUZq317DSCLx6tSB8eNh//3h+uvh9tszHZGkWVqThZkNBu4G8oAH3X1smefHAYNidxsDO7l7y9hznYEHgU6AA79w908jD3LFCijZDMQsDCMdcACcfXbpMFL79pG/rUjO228/OP30MNl9+uk7XiQoOS9tycLM8oCJwM+BfGC2mT3r7otKjnH3i+KOPx/YO66JycBN7v6ymTUFytQuiEinTvDAA2FVat++YbGbiCTm5pvhqafgggvgpZdqV72vWiadlzEMAJa6+zJ33wJMBY7ewfFDgSkAZtYHqOvuLwO4+3p335iWKM3gjDNCd1qJQqRq2rULw1CvvAJ//3umo8kpBQUF9OvXj379+tG+fXs6dOjww/0tW7Yk3M7DDz/MF198kcZIg3Qmiw7Airj7+bHHtmNmXYBuwKuxh3oB68zsaTOba2a3xXoqIpJtzj479Mwvvhg2puc7XU1UUqJ83rx5jBgxgosuuuiH+4nsZVGiJiSL8vqjFa0APBF4yt1LCtDUBQ4CRgH7Ad2BU7d7A7PhZjbHzOasWbMm9YhFpOrq1g2T3cuXw623ZjqaGuHRRx9lwIAB9OvXj3POOYfi4mKKior4/e9/z5577knfvn255557ePzxx5k3bx4nnHBClXskVZXOCe58wuR0iY7AygqOPRE4t8xr57r7MgAzmw78GHgo/kXuPgmYBGEFdzRhi0iVHXwwnHAC3HILnHpq2NckxxxyyPaPHX98WDK1cWO4GLKsU08Nt6++guOO2/a5f/0ruTjef/99/v73v/Pf//6XunXrMnz4cKZOnUqPHj346quvWLBgAQDr1q2jZcuWjB8/ngkTJtCvX7/k3jBB6exZzAZ6mlk3M6tPSAjPlj3IzHoDrYA3y7y2lZm1i90/FFhU9rUikkVuvz1cUnvxxZmOJKe98sorzJ49m/79+9OvXz9ee+01Pv74Y3bbbTeWLFnChRdeyIwZM2jRokW1xpW2noW7F5nZecAMwqWzD7v7QjO7Hpjj7iWJYygw1eOKVLn7VjMbBcw0MwPeAR5IV6wiEoGOHUOBwSuvDFdGHX54piOqkh31BBo33vHzbdsm35Moy9057bTTuOGGG7Z7bv78+bz44ovcc889TJs2jUmTJkXzpglI6zoLd38BeKHMY9eUuX9tBa99GfiftAUnItG7+GJ4+OFwKe38+dpYKwmHHXYYxx13HBdeeCFt27aloKCADRs20KhRIxo2bMhvf/tbunXrxogRIwBo1qwZ3333Xdrj0gpuEYlOgwZhI6Ejj4R77oFRozIdUc7Zc889GTNmDIcddhjFxcXUq1eP++67j7y8PE4//XTcHTPjlltuAWDYsGGcccYZNGrUiFmzZlXpSqqqUIlyEYnekUfCa6/Bhx+GqstZSCXKg0RLlKu2sIhE7667QpXmyy/PdCQSEQ1DSdqtWhUuPdywofTP9u1hjz3CluL33bftcxs3wmGHwa9/DZs2hfnSxo3DAvuSP/fbL5Tt2rw57MET/1zjxtCsWbj8XzJkt93CENTNN4f9ujO0u5tER79OtZR7aRmflSvDlhzxJ+ymTeGgg8LzDz4YTvglz2/cCH36lF4h+ctfhu2Z40/2xx4b5jkBuncPJ/14Z50VkoQZnH9+6eONG4fbrruGZLF+fSjdtWFDiLnEzTeHZLFqFRx44Paf7+67wxzrBx/AoEHbJpLGjWH06HCxzscfw7hx2z//i1+E
+pJffQULFmyfjFq3hnr1ovm3qLGuvBImTw6bJM2Zk5VVmkvG/2uDVKcclCyyVGFh6clo1SpYvXrbk3FxcTghAzz5ZLjwJP5k3qJFWFQLMGIEvP76tq/v3RvefTc8f/TR4Xc53sCBYRM0gDvvhMWLQzwlJ8viuLKOLVuG50pOtCXf/Evce284T8SfcDt2DM/l5cGXX4bHGjbcftO1tm3hu+9Coti8edtkBmF78hkztv3sGzfCT38anm/cGI46avueS8nvzerVMGVKeCw+oT37bEgWb70Fv/rV9v8+M2fCoYfCtGkwfPi2n71x45Bgf/SjMGz/17+GivYDBkD//rVo64cmTcLaixNPDBk/dvVOtmjYsCEFBQW0adOmxicMd6egoICGDRsm3YYmuJOwdSt8/z00alR6svv00+2HWk48MZwAX3kFXn55+xPatGnhJHvzzeFbePzzULrP0rBh8Je/bBtDy5ZhfyaA3/42tFVysmrSJJzoZs4Mz48ZE0728Se0zp1Lv9E//3w4IcefzFu3hl69wvMbNoQrIGv6N+mSf9cNG8IJvVEjKCgIPYv4f9uNG0OC3XXXkGQffXT7f/v77w8jMZMnh/2BVq8O72EWksjMmWHet6AgJL4GDTL72dPGPWTV+fPDZHebNpmO6AeFhYXk5+ezqWy3t4Zq2LAhHTt2pF6ZX+REJ7iVLIC5c0t/4UtuGzaEb+a9esHUqeEXvuRkUPJ/a8mS8Pztt8Oll27fbn4+dOgA110Hf/rT9kMZr78eThSTJ4c1TGW/nV5xRfimPWtWGOYpO27fu3d4n8LCMD5fw78c5bSvvw6JZdassAHj1Knhi8aIEeGLQr9+oTc2YEC41aiLdBYsgL33Dl2we+/NdDRShpJFFUyfHnaJLHsyfuCBsMXF66+Hb/Zlx7WHDQvDJB9/HMbGy76+U6dwEo+fHxCJ9+qr4YvCrFkhmXz3XegVLlsWnp88uXRYr1OnHP5/dMEFMHEivPNOyIySNZQsRHJMcXHora5eXVrUrnt3+OST8PPOO4dex29+E77c5JS1a0M3vHdveOONHM56NY/WWYjkmDp1wvBTfPXTxYvh7bdhwgQ44ghYurT0woTCwrCNxO9+F5Y1/Oc/WbydRKtWMHZsCPJvf8t0NJIE9SxEckzJsOZXX4VLkGfNCvNjEOZBJk4Mj2/YEIZI+/TJkjUnxcXw4x+HYJcsCYthJOPUsxCpoUpGcNq2DVfBrVgR1so880xYP7LvvuH5//wnrEVp0SKsmbn44jCxXnIVXbWrUydcNbJqFZRTUVWym3oWIjXUmjXhku1Zs8Jt7txwJd/cuWGO+dVXwzqQkquwdtqpmgI77bSw+GTBgtJL+iRjNMEtItsoLIT33w/zHHXrhvU9V19dusCyS5eQNCZPDuuDiou3XyQZidWrw2T3AQfAiy9qsjvDNAwlItuoVy8sdyiZv7jySvj223Bp+B13hOmEzz8PiQLg5JPDpePDhsGf/xwu7Y1ki+eddw6Lj2bMCEvlJSeoZyEi5br3XnjhhTCEtWZNeKxDhzCM1a7djl9bqcLCkLk2bIBFi8JyeckI9SxEJCXnnAPPPRdGjT75BB5/PMxxpJwoIHRzxo8PdXJuuy2CBiXdlCxEZIfMoGtXOP74UBCxuDjMTads0KBQ2OxPf4LlyyNoUNJJyUJEqmTMGNh/f/jsswgau/32kI0uuSSCxiSdlCxEpErOPDP8edFFETTWuXOYaZ82rbRMsmQlJQsRqZLOneGPf4Snnw4XNKVs1KhQBOv880vr8kvWUbIQkSq75BLo2TOc3zdvTrGxhg1DcavFi0t37JKsk9ZkYWaDzWyJmS01s9HlPD/OzObFbh+a2boyzzc3s8/NbEI64xSRqmnQIJzXCwvDBU0pO/JIGDIErr0WvvgiggYlamlLFmaWB0wEhgB9gKFm1if+GHe/yN37uXs/YDzwdJlmbgBeS1eMIpK8I44I9QAjqdhhFnoXmzaFAleS
ddLZsxgALHX3Ze6+BZgKHL2D44cCU0rumNm+wM7AS2mMUURSUL9+2Ip2ypTKj61Ur16h2uGjj8Kbb0bQoEQpncmiA7Ai7n5+7LHtmFkXoBvwaux+HeAOoJzNSkUkm0yaFPbUiGSy+49/DJubn39+2BRdskY6k0V51cEqqi1yIvCUu5f87zgHeMHdV1RwfHgDs+FmNsfM5qwpqUcgItVqxIgIJ7ubNg0rut95J2xOLlkjnckiH+gUd78jsLKCY08kbggKOAA4z8w+BW4H/mBmY8u+yN0nuXt/d+/fLpIaBCJSVSWT3R99BHfeGUGDQ4eGDTiuuAK+/jqCBiUK6UwWs4GeZtbNzOoTEsJ2JSbNrDfQCvhhkNLdT3L3zu7eFRgFTHZ3zXqJZKkjjoBf/zrsaZTyym6zkH3WroVrrokkPkld2pKFuxcB5wEzgMXAE+6+0MyuN7Oj4g4dCkz1mlL+VqSWGjcOBg4ME94p22svOPvsUBv9vfciaFBSpRLlIpKdvv46XCHVp08od6tNktJCJcpFJCNWr4bLLotgsrt167Cd3xtvhM3DJaOULEQkUvPmhQuaIpnsPv102GefUD9q/foIGpRkKVmISKQinezOy4MJE2DlSrjxxkjik+QoWYhI5MaNC39efHEEjR1wAJxySuiqfPhhBA1KMpQsRCRyJWXMp02LaGX32LGhOu3IkVBDLsrJNUoWIpIWl1wSehZ77hlBY+3bh4q0L74YNgaXaqdLZ0UkNxQWhvUXmzfDwoWhpyEp06WzIpIVPvoIBg+OYLK7Xj245x5Ytizs3S3VSslCRNKqfn14/fWIJrsPOyxcanXzzRFkH6kKJQsRSasuXUonu1+KYneaO+8Mk9yjRkXQmCRKyUJE0i7SPbu7dAkVaZ98El59NZL4pHJKFiKSdg0ahOmGDz8MtQFTduml0LUrXHBBmPiWtFOyEJFqMXgw/PWvMHx4BI01ahRW/i1cCPfeG0GDUhklCxGpNiedBI0bR9QZOPpoOPzwsOfF6tURNCg7omQhItXqo49g990jmOw2g7vvho0bwxyGpJWShYhUq86doU6diCa7f/SjUALkkUfg7bcjiU/Kp2QhItUqfrI7kjLmV18Nu+wSsk9xcQQNSnmULESk2g0eDMceG6qOp7y2rnlzuPVWmD079DAkLZQsRCQjxo0La+vuuy+Cxk46KWwAfsUVsHZtBA1KWUoWIpIRXbrAm29GtKeRGYwfD199BWPGRNCglKVkISIZs9deYbL7yy8jmOzee28466yw7mLBgkjik1JKFiKSUStXQu/eEU1233gjtGgRJrtryPYL2ULJQkQyatddYdCgiCa727SBm26C116DJ56IJD4J0poszGywmS0xs6VmNrqc58eZ2bzY7UMzWxd7vJ+ZvWlmC81svpmdkM44RSSzSia7IyljfuaZYUhq1CjYsCGCBgXSmCzMLA+YCAwB+gBDzaxP/DHufpG793P3fsB44OnYUxuBP7j7HsBg4C4za5muWEUksyItY56XFya78/PDvhcSiYSShZlNM7NfmllVkssAYKm7L3P3LcBU4OgdHD8UmALg7h+6+0exn1cCXwLtqvDeIpJjSsqYz5wZQWMDB8LJJ4cd9ZYujaBBSfTk/2fgd8BHZjbWzH6UwGs6ACvi7ufHHtuOmXUBugHbFac3swFAfeDjBGMVkRzUoEFYV3fLLRE1eOutYZu+kSMjarB2SyhZuPsr7n4SsA/wKfCymf3XzIaZWb0KXmblNVXBsScCT7n71m0aMNsF+F9gmLtvt47fzIab2Rwzm7NmzZpEPoqIZLEWLcKfCxeGUaSU7LJLqEj7/PPhJilJeFjJzNoApwJnAHOBuwnJ4+UKXpIPdIq73xFYWcGxJxIbgop7v+bA88Af3f2t8l7k7pPcvb+792/XTqNUIjXBd9/BT34SUYfgwgvDdbkjR0awkKN2S3TO4mngDaAx8Ct3P8rdH3f384GmFbxsNtDTzLqZWX1C
Qni2nLZ7A62AN+Meqw/8HZjs7k9W5QOJSG5r1gwuuyyiye769UPVwqVLI1rIUXuZJ7BwxcwOdfcqb3ZrZr8A7gLygIfd/SYzux6Y4+7Pxo65Fmjo7qPjXncy8AiwMK65U919XkXv1b9/f58zZ05VQxSRLLR5M/TtG1Z3z58f5jNScuyxIfMsWQIdO0YSY01hZu+4e/9Kj0swWZwL/M3dS9ZBtAKGunvW7GeoZCFSs/zjHzBkCPzpTzB6u1VaVfTJJ9CnT9hdb+rUSOKrKRJNFonOWZxZkigA3H0tcGaywYmIVGbwYDjhBNiyJYLGunULY1uPPw7/+lcEDdY+ifYs5gN7eezg2IK7+bFFc1lBPQuRmsc9FJSNxMaNYT/XFi3g3Xehbt2IGs5tUfcsZgBPmNnPzOxQwpVL/0glQBGRypQkihkz4PXXU2ysceMwyb1gAfz5zynHVtsk2rOoA5wF/IywfuIl4MGy6yIyST0LkZqpsBD22CMkjpQnu93h8MNhzpww2b3TTpHFmasi7Vm4e7G7/9ndj3P337j7/dmUKESk5qpXr3TP7nHjUmzMLDS2fj1ceWUk8dUWia6z6GlmT5nZIjNbVnJLd3AiIlC6Z/cNN0RQxnz33eGCC+Dhh+HttyOJrzZIdM7iEUJ9qCJgEDCZUIZDRKRaRFrGfMyYUA7k7LNhqwZJEpFosmjk7jMJcxzL3f1a4ND0hSUisq0uXUKRwcMPj2ATvObNw2T33Lma7E5QoteObYpNcn9kZucBnwOaGRKRanX++RE2dvzx8OCDcNVVcNxx0L59hI3XPIn2LEYS6kJdAOwLnAyckq6gREQq4g733w8TJqTYkBlMnAibNoVd9WSHKk0WsQV4x7v7enfPd/dhsSuiyq0EKyKSTmZh3cXll0cw2d2rV1jZ/be/wT//GUl8NVWlySJ2iey+ZpGtoxQRSUnJZPcll0TQ2JVXhnIg55wTUW2RminRYai5wDNm9nsz+3XJLZ2BiYhUpEuXMNXw1FPwckU76iSqUaOw9uKDD1TGfAcSXcH9SDkPu7ufFn1IydEKbpHaJb6M+cKFEZR6OvbYML61eHHIRrVEoiu4E/rrdfdhqYckIhKdBg3CxUybN0dUE/Cuu0IZ8wsvhOnTI2iwZknorzjWs9iuC5JNPQsRqX0OPrj055Qr1HbpEvbsHj0annsOjjwy5fhqkkTnLJ4j7If9PDATaA6sT1dQIiJVcd11cPLJETR00UWhHMj554eS5vKDRAsJTou7/Q04Huib3tBERBJTty489lhEe3bfey98+mnYok9+kGjPoqyeQOcoAxERSdaoUbDbbqFDsHlzio0dckjoptx6ayhjLkDiVWe/M7NvS27A/wGXpzc0EZHENGgA48dHVMYc4LbbwiW1550XQSGqmiHRYahm7t487tbL3aelOzgRkUQNHgzHHANjx4btKlLSvj3ceCO88go88UQk8eW6RHsWx5pZi7j7Lc3smPSFJSJSdffcA//9LzRtGkFjZ58Ne+8dJr2//TaCBnNbonMWY9z9m5I77r4OGJOekEREktOpU1gqARGc3/PyQvnyL74I+1/Ucokmi/KOq3SNhpkNNrMlZrbUzEaX8/w4M5sXu31oZuvinjvFzD6K3VThVkQSdtllMGBABJPd++8Pw4eHLsu8eZHElqsSTRZzzOxOM+thZt3NbBzwzo5eEKtWOxEYAvQBhppZn/hj3P0id+/n7v2A8cDTsde2JvRc9gcGAGPMrFVVPpiI1F6HHhouZIpksvvmm6F161BosLg4ggZzU6LJ4nxgC/A48ATwPXBuJa8ZACx192XuvgWYChy9g+OHAlNiPx8BvOzuX7v7WuBlYHCCsYpILVcy2X3DDbBiRYqNtW4dro568014pLwyebVDoldDbXD30e7eP3a70t03VPKyDkD8P1N+7LHtmFkXoBvwalVea2bDzWyOmc1Zs2ZNIh9FRGqJceNCRyCSPbv/8Ac48MCwiUZBQQQN
5p5Er4Z62cxaxt1vZWYzKntZOY9VdMHyicBTsb0zEn6tu08qSWDt2rWrJBwRqU26dg1lzF95BVatSrGxOnXCyu516+CKK6IIL+ckOgzVNnYFFACxoaHK9uDOBzrF3e8IrKzg2BMpHYKq6mtFRMo1alSYu9hllwga23NPGDkSHngA3qp9G4UmmiyKzeyH8h5m1pWKewklZgM9zaybmdUnJIRnyx5kZr2BVsCbcQ/PAA6P9WBaAYfHHhMRSVjDhrDTTmE4av78CBocMwY6dAhrMIqKImgwdySaLK4C/m1m/2tm/wu8BuywL+buRcB5hJP8YuAJd19oZteb2VFxhw4FpnrcLkzu/jVwAyHhzAaujz0mIlJlV10FBxwQwWR3s2Zh34t588KwVC2S0E55AGa2EzAcmAc0BL5099fTGFuVaKc8EanI8uWh8vgvfwlPPpliY+4wZEhYKh7ZGFfmJLpTXqIT3GcQ9rG4JHb7X+DaVAIUEakuXbrAlVdGtGe3GUyYAFu2wCWXRBJfLkh0GOpCYD9gubsPAvYGdK2qiOSMUaOgR49QSDblld277RYuo50yBWbOjCS+bJdostjk7psAzKyBu38A9E5fWCIi0WrYMJQxLy6Gzz6LoMHRo6F7dzj33AiyT/ZLNFnkx9ZZTAdeNrNn0KWsIpJjhgyBRYugZ88IGmvUKAxHLVkCd9wRQYPZLdEV3Me6+zp3vxa4GngIUIlyEck59erBhg1hG9aUDRkCv/512Pvi008jaDB7VXlbVXd/zd2fjdV7EhHJOZMmwUknwfTpETR2111hhfeFF0bQWPZKdg9uEZGcde65YV+js86ClMvKdeoUFus9+2y41VBKFiJS69SvD48+Gko9nX12BNtsjxwZdl264ALYuDGSGLONkoWI1Ep77gnXXw/TpoUrYFNSr17YVW/5crjppkjiyzZKFiJSa40aFfY02nffCBr76U9DKfPbboMPPoigweyiZCEitVZeHkycCL17h6GolIejbr0VmjQJkyIpN5ZdlCxEpNbbsAGOPx4eeijFhnbeOWzD+uqrMHVqJLFlCyULEan1GjUKG+BddBF88kmKjQ0fDv37hy36vvkmkviygZKFiNR6deqE7bXNYNiwUBIkaXl5YbKS3wX9AAAQU0lEQVR79Wq45prIYsw0JQsREUJl2rvugtdeCzWkUtK/P4wYEcqBzJ0bSXyZlvB+FtlO+1mISKrc4Ve/CrvqffQRNGiQQmNr14aZ8+7dw94XdbLzu3mk+1mIiNQGZmGSe86cFBMFQKtWcPvt8PbbEcycZ56ShYhInJ13Dvt2b90Kb72VYmO//z0cdFAoZ/7VV5HElylKFiIi5bjhhrDO7r33UmjELOzV/e23IWHkMCULEZFynHcetG4dFmVvSaXGdt++4Zrchx4Kcxc5SslCRKQcbduGUubz54caUim55hro2DFULSwqiiS+6qZkISJSgaOOglNPhT/9KcxTJ61pU7j77pB5JkyIKrxqldZkYWaDzWyJmS01s3IH7MzseDNbZGYLzeyxuMdvjT222MzuMTNLZ6wiIuW56y445JAIGjr22LCz3tVXw+efR9Bg9UpbsjCzPGAiMAToAww1sz5ljukJXAEMdPc9gJGxx38CDAT+B+gL7AccnK5YRUQq0qIFzJwJ+++fYkNmYbVfYSFcckkksVWndPYsBgBL3X1ZbAvWqcDRZY45E5jo7msB3P3L2OMONATqAw2AesDqNMYqIrJD338f5qlfey2FRnr0gCuvhMcfh5dfjiy26pDOZNEBWBF3Pz/2WLxeQC8z+4+ZvWVmgwHc/U3gn8Cq2G2Guy9OY6wiIjtUXAz/939hDuO771Jo6LLLYLfdQhnzzZujCi/t0pksyptjKFtbpC7QEzgEGAo8aGYtzWw3YHegIyHBHGpmP93uDcyGm9kcM5uzJuWNdEVEKtakSdiKdfnysGlS0ho2DJPcH30UNkrKEelMFvlAp7j7HYGV5RzzjLsX
uvsnwBJC8jgWeMvd17v7euBF4Mdl38DdJ7l7f3fv365du7R8CBGREgMHhkQxaRL84x8pNHTEEXDccWEL1pRrolePdCaL2UBPM+tmZvWBE4FnyxwzHRgEYGZtCcNSy4DPgIPNrK6Z1SNMbmsYSkQy7vrrYY894PzzQ0mQpI0bF8qZn39+Tuyql7Zk4e5FwHnADMKJ/gl3X2hm15vZUbHDZgAFZraIMEdxqbsXAE8BHwMLgPeA99z9/9IVq4hIoho2hClT4Lnnwrk+aR07wnXXwfPPw7Nlv0dnH5UoFxFJkjusXAkdyl66k6jCQthnn1A7atGiMDFSzVSiXEQkzcaMgb33hi+/rPzYctWrF3bV++wzuPHGSGOLmpKFiEiSTjghdArOPjuFaYcDDwzX495+e+hdZCklCxGRJO2xR+gQPP00PPZY5cdX6NZboVmzsPYiS6cGlCxERFJw0UXhktrzzkuh5FO7dqFa4b/+lWLWSR8lCxGRFOTlwV/+EmpILV2aQkNnnAEDBoS6UevWRRVeZJQsRERStNtuYUH2wamUO83LC7vqrVkTKtNmGSULEZEI1KsXFundeScsW5ZkI/vuC+ecE5LGO+9EGl+qlCxERCKyenVYZzdsWCg8mJQbbghzGOeck0Ij0VOyEBGJyK67hg3xXn89/JmUli3DZbSzZsGDD0YaXyq0gltEJELucPTR8NJLMHcu7L57ko0MGhS2YV2yJPQ00kQruEVEMsAsVKVt2jQMRyX1fdwszFt89x1cfnnkMSZDyUJEJGLt28PkyXDHHeG8n5Q+fcJltI88Av/+d6TxJUPDUCIiabZpU6hWW2UbNoRxrBYt4N13wyVXEdMwlIhIFrjxRvjxj5PcQbVJE7jnHnj/fRg/PvLYqkLJQkQkjfr1g/feC5fUJuXoo+GXvwwlbvPzI42tKpQsRETS6Mgj4bTT4JZb4K23kmjALPQuiorg4osjjy9RShYiImk2blzYGO+UU2DjxiQa6N4drroKnnwSZsyIPL5EKFmIiKRZ8+bhoqb8/LDWLimXXgo9e4bytps2RRpfIpQsRESqwaGHwvLlcMghSTbQoAFMnBhK2956a5ShJUTJQkSkmrRtGxbpTZsWdtirsp//PGzPd/PN8PHHkce3I0oWIiLV6MMP4fjjw3q7pNxxR1hvccEF1bqrnpKFiEg16t07TD88+CC88EISDXToANdfH148fXrk8VVEK7hFRKrZ5s3Qvz8UFIT1dq1bV7GBoqKw98XatbBoUShElaSsWMFtZoPNbImZLTWz0RUcc7yZLTKzhWb2WNzjnc3sJTNbHHu+azpjFRGpLg0ahNpRa9aEi5uqrG7dUGhwxYqw/0U1qJuuhs0sD5gI/BzIB2ab2bPuvijumJ7AFcBAd19rZjvFNTEZuMndXzazpkD27AIiIpKivfeG224Lo0pJGTgwrPa78074wx9gjz0ija+sdPYsBgBL3X2Zu28BpgJHlznmTGCiu68FcPcvAcysD1DX3V+OPb7e3ZNZyiIikrVGjoTf/jb8nNSMwC23hEUc55yT9snudCaLDsCKuPv5scfi9QJ6mdl/zOwtMxsc9/g6M3vazOaa2W2xnoqISI0zfjz87ndJnO/btoUJE+Dss9MSV7y0DUMB5VVxL/tXURfoCRwCdATeMLO+sccPAvYGPgMeB04FHtrmDcyGA8MBOnfuHF3kIiLVaMsWmDoVhgwJI0pVMnRoWmIqK509i3ygU9z9jsDKco55xt0L3f0TYAkheeQDc2NDWEXAdGCfsm/g7pPcvb+792+Xxm0HRUTSaeRIOOigsHRixYrKj8+EdCaL2UBPM+tmZvWBE4FnyxwzHRgEYGZtCcNPy2KvbWVmJRngUGARIiI1UF5eqB1VVASnn16ta+0SlrZkEesRnAfMABYDT7j7QjO73syOih02Aygws0XAP4FL3b3A3bcCo4CZZraAMKT1QLpiFRHJtB494Pbb4ZVXkixlnmZalCcikiXc
Yf582Guv6nvPrFiUJyIiiTMrTRSzZ8PWrZmNJ56ShYhIlpkzBwYMgLvuynQkpZQsRESyzL77wjHHhM3xFmXJpT1KFiIiWcYM7rsPmjULW7EWFmY6IiULEZGstPPO8Oc/hyGpsWMzHY2ShYhI1jruuLDuolWrTEeS3nIfIiKSogcfzHQEgXoWIiI5YOrUsPV2pihZiIjkgJkz4eqr4b//zcz7K1mIiOSAO++Ezp3D1VEbNlT/+ytZiIjkgGbNQrHBpUvhiiuq//2VLEREcsQhh8CFF4bNkhYsqN731tVQIiI55Oab4dBDYc89q/d91bMQEckhjRvDUbFNHlatqr73VbIQEclB//43dO8Ozz1XPe+nZCEikoP22w969oQzz4SCgvS/n5KFiEgOatAAJk8OiWL69PS/nya4RURyVL9+8OGH0LVr+t9LPQsRkRxWHYkClCxERCQBShYiIlIpJQsREamUkoWIiFQqrcnCzAab2RIzW2pmoys45ngzW2RmC83ssTLPNTezz81sQjrjFBGRHUvbpbNmlgdMBH4O5AOzzexZd18Ud0xP4ApgoLuvNbOdyjRzA/BaumIUEZHEpLNnMQBY6u7L3H0LMBU4uswxZwIT3X0tgLt/WfKEme0L7Ay8lMYYRUQkAelMFh2AFXH382OPxesF9DKz/5jZW2Y2GMDM6gB3AJfu6A3MbLiZzTGzOWvWrIkwdBERiZfOFdxWzmNezvv3BA4BOgJvmFlf4GTgBXdfYVZeM7HG3CcBkwDMbI2ZLU8h3rbAVym8PlvUlM8B+izZqqZ8lpryOSC1z9IlkYPSmSzygU5x9zsCK8s55i13LwQ+MbMlhORxAHCQmZ0DNAXqm9l6dy93khzA3dulEqyZzXH3/qm0kQ1qyucAfZZsVVM+S035HFA9nyWdw1CzgZ5m1s3M6gMnAs+WOWY6MAjAzNoShqWWuftJ7t7Z3bsCo4DJO0oUIiKSXmlLFu5eBJwHzAAWA0+4+0Izu97MYlt3MAMoMLNFwD+BS929GortiohIVaS16qy7vwC8UOaxa+J+duDi2K2iNv4C/CU9EW5jUjW8R3WoKZ8D9FmyVU35LDXlc0A1fBYL52sREZGKqdyHiIhUSskixsxuMLP5ZjbPzF4ys10zHVOyzOw2M/sg9nn+bmYtMx1Tsszst7FSMMVmlnNXriRS8iZXmNnDZvalmb2f6VhSYWadzOyfZrY49n/rwkzHlCwza2hms8zsvdhnuS5t76VhqMDMmrv7t7GfLwD6uPuIDIeVFDM7HHjV3YvM7BYAd788w2Elxcx2B4qB+4FR7j4nwyElLFby5kPiSt4AQ+NL3uQSM/spsJ5wdWLfTMeTLDPbBdjF3d81s2bAO8AxufjvYmEhWhN3X29m9YB/Axe6+1tRv5d6FjEliSKmCdsvIMwZ7v5S7Go0gLcIa1xykrsvdvclmY4jSYmUvMkZ7v468HWm40iVu69y93djP39HuFqzbHWJnODB+tjderFbWs5dShZxzOwmM1sBnARcU9nxOeI04MVMB1FLJVLyRjLIzLoCewNvZzaS5JlZnpnNA74EXnb3tHyWWpUszOwVM3u/nNvRAO5+lbt3Av5GWCOStSr7LLFjrgKKCJ8nayXyWXJUIiVvJEPMrCkwDRhZZmQhp7j7VnfvRxhBGBArmRS5tK6zyDbufliChz4GPA+MSWM4Kanss5jZKcCRwM88yyemqvDvkmsSKXkjGRAb358G/M3dn850PFFw93Vm9i9gMBD5RQi1qmexI7G9NUocBXyQqVhSFaveezlwlLtvzHQ8tVgiJW+kmsUmhR8CFrv7nZmOJxVm1q7kakczawQcRprOXboaKsbMpgG9CVfeLAdGuPvnmY0qOWa2FGgAlJROeSuHr+w6FhgPtAPWAfPc/YjMRpU4M/sFcBeQBzzs7jdlOKSkmdkUQoXotsBqYIy7P5TRoJJgZgcCbwALCL/vAFfGKk7kFDP7H+BRwv+vOoSySten
5b2ULEREpDIahhIRkUopWYiISKWULEREpFJKFiIiUiklCxERqZSShUgVmNn6yo/a4eufMrPusZ+bmtn9ZvZxrGLo62a2v5nVj/1cqxbNSnZTshCpJma2B5Dn7stiDz1IKMzX0933AE4F2saKDs4ETshIoCLlULIQSYIFt8VqWC0wsxNij9cxs3tjPYXnzOwFMzsu9rKTgGdix/UA9gf+6O7FALHqtM/Hjp0eO14kK6ibK5KcXwP9gL0IK5pnm9nrwECgK7AnsBOh/PXDsdcMBKbEft6DsBp9awXtvw/sl5bIRZKgnoVIcg4EpsQqfq4GXiOc3A8EnnT3Ynf/Avhn3Gt2AdYk0ngsiWyJbc4jknFKFiLJKa/8+I4eB/geaBj7eSGwl5nt6HewAbApidhEIqdkIZKc14ETYhvPtAN+CswibGv5m9jcxc6EwnslFgO7Abj7x8Ac4LpYFVTMrGfJHh5m1gZY4+6F1fWBRHZEyUIkOX8H5gPvAa8Cl8WGnaYR9rF4n7Bv+NvAN7HXPM+2yeMMoD2w1MwWAA9Qut/FICDnqqBKzaWqsyIRM7Om7r4+1juYBQx09y9i+w38M3a/oontkjaeBq7I4f3HpYbR1VAi0XsutiFNfeCGWI8Dd//ezMYQ9uH+rKIXxzZKmq5EIdlEPQsREamU5ixERKRSShYiIlIpJQsREamUkoWIiFRKyUJERCqlZCEiIpX6f6o4eWw6R5hQAAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "# plot误差曲线\n",
    "train_means = grid.cv_results_['mean_train_score']\n",
    "test_means = grid.cv_results_['mean_test_score']\n",
    "\n",
    "x_axis = np.log10(Cs)\n",
    "\n",
    "plt.plot(x_axis, np.array(train_means), 'r-', label='Train')\n",
    "plt.plot(x_axis, np.array(test_means), 'b--', label='Test')\n",
    "\n",
    "plt.legend()\n",
    "plt.xlabel('log(C)')\n",
    "plt.ylabel('accuracy')\n",
    "\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "由上图可以看出，当log(C)=1时，即C=10时正确率达到最大"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "RBF-SVM（注：特征未经标准化，下方输出中出现大量 ConvergenceWarning，求解器在 max_iter=5000 内提前终止；可考虑先用 StandardScaler 预处理）"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n",
      "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\svm\\base.py:218: ConvergenceWarning: Solver terminated early (max_iter=5000).  Consider pre-processing your data with StandardScaler or MinMaxScaler.\n",
      "  % self.max_iter, ConvergenceWarning)\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "GridSearchCV(cv=5, error_score='raise',\n",
       "       estimator=SVC(C=1.0, cache_size=200, class_weight=None, coef0=0.0,\n",
       "  decision_function_shape='ovr', degree=3, gamma='auto', kernel='rbf',\n",
       "  max_iter=5000, probability=False, random_state=None, shrinking=True,\n",
       "  tol=0.001, verbose=False),\n",
       "       fit_params=None, iid=True, n_jobs=1,\n",
       "       param_grid={'C': [0.01, 0.1, 1, 10, 100], 'gamma': [0.0001, 0.001, 0.01, 0.1, 1]},\n",
       "       pre_dispatch='2*n_jobs', refit=True, return_train_score='warn',\n",
       "       scoring=None, verbose=0)"
      ]
     },
     "execution_count": 27,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from sklearn.svm import SVC\n",
    "\n",
    "Cs = [0.01, 0.1, 1, 10, 100]\n",
    "gammas = [0.0001, 0.001, 0.01, 0.1, 1]\n",
    "\n",
    "param_grid = {'C': Cs, 'gamma': gammas}\n",
    "svc_rbf = SVC(kernel='rbf', max_iter=5000)\n",
    "grid = GridSearchCV(svc_rbf, param_grid, cv=5)\n",
    "\n",
    "grid.fit(X_train, y_train)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.7767\n",
      "{'C': 10, 'gamma': 0.001}\n"
     ]
    }
   ],
   "source": [
    "print(grid.best_score_)\n",
    "print(grid.best_params_)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYUAAAEKCAYAAAD9xUlFAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAIABJREFUeJzs3Xd4FNX6wPHv2ZLeE0JJCJDQEVCkCKKiggUVKwpe9SJ2LwoiKpaLYG+gV7F3r4WqghUr6vUnCCi9E0oKJZBeN7t7fn/sJmxCypLsZjfJ+3mePJmdOTvz7kLOO3POzDlKa40QQggBYPB1AEIIIfyHJAUhhBCVJCkIIYSoJElBCCFEJUkKQgghKklSEEIIUUmSghBCiEpeTQpKqfOUUtuUUjuVUtNr2J6klPpZKfW3Umq9Umq0N+MRQghRN+Wth9eUUkZgOzAKSAdWAeO11ptdyrwB/K21flUp1Rv4Wmvd2SsBCSGEqJfJi/seDOzUWqcCKKXmARcDm13KaCDCuRwJZNa307i4ON25c2fPRiqEEC3cmjVrDmut29RXzptJIQFIc3mdDgypVmYm8J1S6g4gFBhZ046UUjcDNwMkJSWxevVqjwcrhBAtmVJqrzvlvNmnoGpYV72tajzwntY6ERgN/FcpdUxMWus3tNYDtdYD27SpN9EJIYRoIG8mhXSgo8vrRI5tHroBWACgtf4DCALivBiTEEKIOngzKawCuimluiilAoBxwNJqZfYBZwMopXrhSApZXoxJCCFEHbzWp6C1tiqlJgHLACPwjtZ6k1LqEWC11nopcDfwplLqLhxNSxN0A26HKi8vJz09ndLSUk9+hGYvKCiIxMREzGazr0MRQjQTXrsl1VsGDhyoq3c07969m/DwcGJjY1Gqpq6M1kdrzZEjRygoKKBLly6+DkcI4WNKqTVa64H1lWsRTzSXlpZKQqhGKUVsbKxcPQkhjkuLSAqAJIQayHcihDhe3nxOQQghjrKWQV465KVBbhrkZwIalBEMRjCYnD8ur1Ut6yu3NfZ9ruvlJAokKXjUt99+y+TJk7HZbNx4441Mn151uKeysjKuu+461qxZQ2xsLPPnz6fi6ewnn3ySt99+G6PRyIsvvsi5555b5z7nzp3LCy+8wK5du8jKyiIuTu7kFT5mKXJU9rn7IG+fYznP+To3DQoPcuyjSn5EGaolE4PLcm3JpHoS8kbictne6VSI7+XVr0GSgofYbDb+9a9/8f3335OYmMigQYMYM2YMvXv3rizz9ttvEx0dzc6dO5k3bx733Xcf8+fPZ/PmzcybN49NmzaRmZnJyJEj2b59O0Ct+zz11FO58MILGTFihI8+sWhVtIaSnKNn+RW/c/ceXS7JrvoegxkiEyAqCbqOhKiOENnR8TsqCSISHJWf3Qra5vhtt4Ld7rJcsa1iu8vvKu+p9j7tWr5aOe26f5ftVfZX7X1uxeAsYy2r+z11fV5tc8RXmwufl6TQXPz555907dqV5ORkAMaNG8eSJUuqJIUlS5Ywc+ZMAK644gomTZqE1polS5Ywbtw4AgMD6dKlC127duXPP/8EqHWfJ510UtN+QNGyaQ2Fh5wV/N6qFX/F2b6lsOp7zCFHK/kOA5yVfaej68LaOs5u62MI8M5naq7s9mpJzSUBBYR6/fAtLinM+mITmzPzPbrP3h0iePiiPnWWycjIoGPHow9wJyYmsnLlylrLmEwmIiMjOXLkCBkZGZxyyilV3puRkQFQ7z6FcIvNCgWZ1Sr7fUebdvLSwVZW9T1BkRCZBNGdocvpRyv7SOeZfkistMN7g8EAGMDom+eLWlxS8JWanveofvdPbWVqW2+3H3sZKXcUiRpVdOLm7qvajl/ZqZvhONN0FdrGUbm36ws9RzsSQEXTTmRHCIqo+VjCYyxWO3kl5ZU/+aXl5Fe8Li6vsi2vpJwbT0tmVO+2Xo2pxSWF+s7o
vSUxMZG0tKODwqanp9OhQ4cayyQmJmK1WsnLyyMmJqbO99a3T9FKlBW6VPb7jm3aKTxYtbwyQHgHRyWfdErVyj4qCSITwRzsm8/SwpSW2xwVesmxlXiVCr/EekyZknJbnfsONhuJDDZX/jTFw8YtLin4yqBBg9ixYwe7d+8mISGBefPm8fHHH1cpM2bMGN5//32GDh3KokWLOOuss1BKMWbMGK6++mqmTp1KZmYmO3bsYPDgwWit692naAEqOnFrquwr1pXkVH2Pweyo2KM6QrdRR8/yK5p4IhJ81vzQ3GitKXFW7DWdoedXnsVba6zsLdY6OoaBsEATkcFmIoLNRAab6BwXUlnJRwSZiQwxu2w3V9kWYGr6R8kkKXiIyWRi7ty5nHvuudhsNiZOnEifPn2YMWMGAwcOZMyYMdxwww1ce+21dO3alZiYGObNmwdAnz59uPLKK+nduzcmk4mXX34Zo9HRQVfTPgFefPFFnnnmGQ4cOEC/fv0YPXo0b731ls8+v6iD3Q5Fh2pox3dJADV14lac2ScOPHqGX7EurK2z7VmAo2IvLLPWWJlXnKUfe+Z+tMmm3Fb7GbhSEB5oqqy8I4PNtI0Ic1ToNVTmlZV6sJmIIBMmY/P6d2oRYx9t2bKFXr28e5tWcyXfTRMpL4HMtZCz5+hZfuXvdLBZqpYPinKe2Vdrx69YFxLT6jpx7XZNQS1n4/mlNVforq/tdVRlRoMiIshUtcKupTKvchYfbCY8yITB0Pz/Ldwd+0iuFIRoCLsdDqyH1J9h18+wb0XVu3dC4x0VfPv+0PPCapV+y+rEtdltFFmLKC4vpqi8iKLyIgrLCytfuy4fKMglMz+XrKJ88kqLsNrMWK2BlFsCsJQHYLcFgy0IbXf+OJexBWFSwUQGhTjPwM1EhwTQOTa0xjN0x++jSSAs0CQ3abhJkoIQ7srd50gAqT9D6i9HH9aK7w2DboQup0FsN8cDW37eiVtuK3dU4NYiCi2FFFuPVuju/BRbiyvfV2Itce+g2oi2BaLtgaADCTIGYTRZMZpL0MHFQCn1PfFsN5ixBYRjDQinzBxGcUAYRnM4KiAce0AYVnM4ZYYwSuxhFJdHUKjCyLOHEW4JJywgjHBzOGbpa6mTJAUhalOSC3t+O5oIslMd68PaQfdzIflMSD4Dwtt5PRStNWW2slor54oz9cLyQsf6asvVz9gtdkv9BwUCjYGEmkOr/LQJaUOoKZTQgFDHb3ModlsAOYUGDuRBxhEbe7Ns5BYZ0PZAjATTvU0s/RPj6JsQRb/ESLq3DT+mE9Wu7Y44LYUUlBdQYCmoXC60FFJgKahcLrQUkl+eT6GlkMPFhyvXF1uL6/1MQcYgwgLCCDOHEREQUbkcHhB+9HdA1deuy6HmUEyGllt1ttxPJsTxslog/U9nElgOmX85hhwwh0Ln4TD4ZkgeAW16utXer7WmxFpSY+V8TMXtcuZd29m5rfpzBrUIMYUcU5G3D2tPqDmUMHMYIeYQQk2hhAWEVZatXO/ynhBzCGbDsWfVRwrLWJ+Rx4b0PNan57EhI5eD+Y6mM4OC7m3DObtLJP0SI+mbGEXPduEEmet/stmgDJUVcHvau/VZq7ParRSVFzkSSrkzkbgsVySXytfO35mFmRSWO5JNqa3+4eaDTcGOWM2OBBIWEEaEOaJyuWJ9lTIuSSjUHIrh2Ono/YIkBdF6aQ2Htjibg5bDnt+hvMgxHk/CyXDaNEg5ExIGgqnuoRh+2vcT7296n5yynKNn8eXFaDcGgDMoA6EmRyUcZg6rrJDjguOOqdyP+TGFHlORe7KyyS22sCHDWfmn57EhI4+MXEdzkVKQHBfKsJQ4+iZE0r9jJL3aRxAS4LtqxWQwERkYSWRgZIP3UW4rP3p14nqV4kwiNV3J5Jbmkl6QXlmu3F5e5zEUypGMa0gc1a9MwszOpBMQQVJEEjFBMQ3+bO6QpCBal/z9jgRQkQgqHvqK7Qonjnc0CXU5zTHE
gxsOFh3kqT+f4od9P9A5ojPdo7vXWHlXr/Arl00hBJuC/aITtKC0nI0Z+WzIyGW98ypgX/bR5pjOsSEM6BTNhGGd6ZsYSZ8OEYQHtbz2ebPRTIwxplGVb5mt7GgicUkuNV29VCxnFWeRakmtLFvTleFDQx7iqp5XNebj1UuSggd5Y+jsiRMn8uWXXxIfH8/GjRub+iM1f2WFsPf3o/0CWVsd60NiHU1BySMciSCqY+37qIFd21m4bSEv/PUC5fZyJg+YzD/7/LPG5hZ/VGyxsikz33kFkMv6jDxSs4oqtydGB9MvMZLxg5PolxjJCR0iiQxpHp/NHwQaAwkMDiQuuGFD2rs2PRZaCsm35FNYXkhyZLKHIz2WJAUP8cbQ2UajkQkTJjBp0iSuu+46H366ZsRmdfQFpC53JIL0Px2jTJqCIGkonHi1IxG07dvgh7925uxk1h+zWJu1liHthzDjlBkkRSR58lN4VGm5jc3786v0Aew8VFh5X3/7yCD6JkRy2UkJ9E2Mom9CJDGhMnKpLymlCDGHEGIOIT4kvkmPLUnBQ7wxdPbQoUM5/fTT2bNnjw8+UTOhNRzZdbQ5aPdvUJYHKGjfD4ZOcvQLdDwFzEGNOlSZrYw31r/BOxvfIcwcxuPDH+ei5Iv8oumnQpnVxrYDBZV9AOsz8th+sACbMwPEhQXQLzGK809o7+gITogkPqJx34toWVpeUvhmOhzY4Nl9tusL5z9VZxFvDZ0talB02Nkv4PzJcw4aGJkEfS529gucAaGxHjvkn/v/5JEVj7A3fy8XJV/EtEHTvN7hV59ym50dBwsr+wA2ZOSxdX8BFptjLJ7oEDN9E6M4u2c8fRMddwO1iwjyqyQm/E/LSwo+4o2hs4VTeQns++PoraIH1jvWB0U6xvkfPsWRCGKSPT40RG5pLrPXzObznZ+TGJbI66NeZ1iHYR49hjtsds2urMIqfQCbM/Mpcw7GFh5kol9iJBOHd6m8AkiM9o8ObNG8tLykUM8Zvbd4a+jsVqm2ISQMZug4BM56yJEE2p8IRu/8F9Za8/Xur3lm1TPkleVxwwk3cEv/Wwg2ef9JZbtds/tIUZU+gI0Z+ZXDLIcGGDkhIZLrhnaib2IU/RIiSYoJaRHj8wjfa3lJwUe8MXR2q5Kz1+VW0RqGkEg509FRHBjm9VDSC9J5bMVj/J75O33j+vLGqDfoEdPDK8fSWpOWXcK69Fzn8wCOBFBYZgUgyGygT4dIrhrUkX7OJqAucWEYJQEIL5Gk4CHeGjp7/PjxLF++nMOHD5OYmMisWbO44YYbfPlRPaMkF3b/ejQR+HAIiQpWu5UPN3/Iy2tfxqAMTB88nXE9xmF0Z55hN2itycwrdTT/OPsA1qfnkVfieNApwGigV4cILj0pobIPoGubsGY39LJo3mTo7BbOb76b2oaQCAhzDCGRPMKRCNr08MmQ0ZsOb2LmHzPZmr2VEYkjePCUB2kX2riEdDC/tEofwIb0PI4UOcYcMhkUPduHV44F1Deh5vGAhPAUGTpb+FZ9Q0icfo8jEbgxhIQ3FZcXM3ftXD7a8hGxQbHMGTGHkUkjj7uD1m7X/LbzMGv35VbeDXSooOp4QGf1jKdfR0cfQA83xwMSoqlJUhCeU+cQEs6Hxo5jCAlv+zX9Vx5b8Rj7i/ZzZfcrmXzyZCICjn+eg/ScYu5esI6Vu7NRClLahDG8a1xlE1Dv9pEEB0gCEM2DV5OCUuo84D+AEXhLa/1Ute3PA2c6X4YA8VrrKG/GJDyorAD2/l8dQ0ic6fh9nENIeNvhksM89edTLNuzjJTIFD44/wNOij/puPejtWbxXxnMXLoJgKcu68uF/TsQFijnWqL58tr/XqWUEXgZGAWkA6uUUku11psrymit73Ipfwdw/H+Zoum4NYTEmdD2BL+cP9iu7Xy641PmrJlDqbWUSSdOYuIJExs06Up2kYUHPt3At5sOMLhzDLOv7E/HmBAvRC1E0/LmKc1gYKfW
OhVAKTUPuBjYXEv58cDDXoxHHK86h5Do79EhJLwtNS+VWf83i78O/cXAtgOZMXQGXSK7NGhfP209yL2LNpBfUs795/fkxtOS5RZR0WJ4MykkAGkur9OBITUVVEp1AroAP9Wy/WbgZoCkJP8deKxF2fIlfHs/5O1zvI5Kgj6XOPsFPDuEhDdZbBbe3vA2b254k2BTMI8Me4RLul7SoCd9i8qsPPbVFj75cx8924Xz3xsG06t9y5lrWQgAb17j1/RXV9v9r+OARVrXPLWU1voNrfVArfXANm3aeCxAT/v222/p0aMHXbt25amnjn2yuqysjKuuuoquXbsyZMiQyoHujhw5wplnnklYWBiTJk1q4qir0Rr+9zzM/weERMMFs+GOv2DyehjzIpxwWbNJCGsOruGKL67glXWvMLLTSJZcsoRLu13aoISwZm8Oo1/8jXmr9nHL6cksmXSqJATRInnzSiEdcO1hTAQyayk7DviXF2PxusYMnR0UFMSjjz7Kxo0bfTtngtUCX94Faz+EEy6Hi1/2+wnoa5JXlsfza55n8Y7FJIQl8OrIVxmeMLxB+7JY7bz44w5eWb6T9pHBzLvpFIYkN4+kKERDeDMprAK6KaW6ABk4Kv6rqxdSSvUAooE/vBiL1zVm6OzQ0FCGDx/Ozp07fRG6Q3E2zL/GMSHNGdNhxHSfPETWGFprlu1dxlMrnyKnLId/9v4nt594OyHmhnUA7zhYwJT5a9mUmc/YkxOZcVHvFjnTmBCuvJYUtNZWpdQkYBmOW1Lf0VpvUko9AqzWWi91Fh0PzNMeerT66T+fZmv2Vk/sqlLPmJ7cN/i+Oss0ZujsuLiGzc7kMVnb4eMrIT8TLnsL+o31bTwNsL9wP4+vfJxf0n+hV0wvXhn5Cr1je9f/xhrY7Zp3/28PT3+7lbBAE69fezLn9mm64TaE8CWv3lCttf4a+LrauhnVXs/0ZgxNpTFDZ/tU6nJYcB0YA2DCl9CxeQ3EZ7Pb+Hjrx7z090sA3DPwHq7udTUmQ8P+a2fmljBt4Tr+b9cRzu4Zz1OX96NNeKAnQxbCr7W4p2zqO6P3lsYMne0zq9+Fr+52jDc0fh5Ed/JdLA2w5cgWZv0xi01HNjE8YTgPnfIQCWEJDdqX1polazP595KN2Oyapy7ry1WDOvo+aQvRxFpcUvCVxgyd3eTsNvju37DiZeg6Cq54B4Kaz500JdYSXl37Kh9s/oDIwEiePf1Zzu18boO/y9xiCw9+vpGv1u/n5E7RzLmyP51iQz0ctRDNgyQFD2nM0NkAnTt3Jj8/H4vFwueff853331XpZPaY8oKYPGNsP1bGHIrnPO41yaq8YbfM37n0RWPklGYweXdLueuk+8iMrDhYykt33aIexetJ7vIwj3n9uDWM1LkQTTRqsnQ2S1cle8mNw0+GecYvfT8p2HwTb4N7jgcKTnCs6uf5avUr+gc0ZkZQ2cwqN2gBu+v2GLlya+38t8Ve+kWH8bzV53ICQn+MVCfEN4gQ2eLqtJXwyfjwVoK/1gIXc/2dURu0Vrz+c7Pmb1mNkXlRdza/1Zu7HsjgcaGd/7+vS+HqQvWsftwETcO78K0c3vIMNZCOElSaA02fgqf3wZhbeGfX0B8T19H5Ja9+Xt55I9H+PPAnwyIH8CMoTNIiUpp8P7KbXbm/rSTuT/vpG14IB/fNIRhKT6+HVgIPyNJoSXTGkrzYMn1jlFMr/oQQv2/Eiy3lfPupnd5fd3rBBoDmTF0Bpd3uxyDavioLLuyCrlr/lrWp+dx2UkJPDymD5HB8iCaENVJUmip7HbHYHaledBvnGPcIpP/32+/9tBaZv0xi525Ozmn0zlMHzydNiENH+9Ka80Hf+zlyW+2EGQ28so/BjC6b3sPRixEyyJJoSWylUP2bsf0l0FRcOlrfj9kRYGlgP/89R8WbFtA29C2vHTWS4zoOKJR+zyQV8o9i9bx247DjOjRhmcu70d8hH8P8S2Er0lSaGnKSyA7
1TEhTnRnyDvg9wnhx70/8sTKJ8gqyeIfvf7BpJMmEWpu3HMCX6zL5KHPN2Kx2nnskhP4x5AkeRBNCDf43/RYzVh9Q2f/+uuvDBgwAJPJxKJFizwfQGk+HN4O2g5xXSE42vPH8KADRQeY/NNkpiyfQnRQNB9f8DH3Db6vUQkhr7icOz/5mzs++ZsucaF8Pfk0rjmlkyQEIdwkVwoe4s7Q2UlJSbz33ns899xzng+gMAvy08EUDDHJYArw/DE8xGa3sWD7Av7z13+w2W1MPXkq1/S+BrOhcR2//9txmGkL13G4sIypo7pz+4gUTEY57xHieEhS8BB3hs7u3LkzAAZPzl+sNeSlQ/FhCIx0jF9k8N977rfnbGfW/81i/eH1DG0/lH8P/TcdwzvW/8Y6lJbbeOqbrbz3f3tIaRPKG9cNo19ilIciFqJ1aXFJ4cATT1C2xbNDZwf26km7Bx6os4w7Q2d7nN0KOXscQ1eExkNEB7/tPyi1lvL6+td5b+N7hAeE8+RpT3JBlwsa3ayzIT2PKfP/ZldWEROGdWb6+T3lQTQhGqHFJQVfafJhsa1ljg5laxlEdvTr5w9W7F/Bo388yr6CfVyccjHTBk4jKqhxZ/JWm51Xlu/ixR93EBcWyH9vGMxp3fx3qlYhmosWlxTqO6P3FneGzvaYskLI2e1oOopNgcBw7xynkXJKc3hu9XMs3bWUpPAk3jrnLYa0H9Lo/e4+XMRd89eyNi2XMf078OjFJxAZIg+iCeEJLS4p+Io7Q2d7RHE25O5zTIoTmwxm/7vvXmvNl6lf8uyqZymwFHBT35u4ud/NBJkaF6vWmo9W7uPxr7ZgNipeHH8SY/p7KfEK0UpJUvAQd4bOXrVqFZdeeik5OTl88cUXPPzww2zatMm9A2gNBfuh8CAEhEF0F78c8jqtII1H/3iUP/b/Qb82/Zg5dCbdors1er+H8ku5d/F6lm/L4rRucTx7RX/aRfpfQhSiuZOhs5sDu81xdVCaCyExjj4EN8cBaqrvptxezn83/5dX176K0WBkyoApjO0+FqMH7oT6ZsN+HvhsA8UWGw+M7sW1p3TCIHMeCHFcZOjslsJW7uhQLi923F0UGu93dxhtPLyRmf83k2052zg76WzuH3w/bUPbNnq/+aXlzFyyiU//zqBfYiRzrjyRrvFhHohYCFEbSQr+rLwYjqSCtjmai4L96977ovIiXvr7JT7e8jFtgtvwwogXOLuTZ+Zp+L9dh7ln4XoO5Jdy59nduOOsrpjlQTQhvE6Sgr8qzXM8g6CMENsNAkJ8HVEVy9OW89iKxzhUfIirelzF5AGTCQto/Fl8abmNZ5dt4+3/7aZLXCiLbxvGiR39KxkK0ZJJUvA3WkPRIcjPBHMIxHRx3GnkJ7KKs3jyzyf5fu/3dI3qyuwRs+nfpr9H9r0xI4+pC9ay/WAh157SiftH9yQkQP6LCtGU5C/On2i7c8iKIxAUCVH+M2SFXdtZtH0RL6x5gTJbGXeedCcT+kzAbGz88wE2u+a1X3bxwg/biQ4J4L3rBzGiR7wHohZCHC9JCv7CbnXMgWApdEybGd7ebzqUd+XuYtYfs/j70N8MaTeEfw/9N50iOnlk3/uOFDN1wVpW783hgr7teeySE4gO9Z8rIyFaG0kKHjJx4kS+/PJL4uPj2bhx4/G92Vrq6FC2WSAqCUJivRPkcSqzlfHm+jd5e+PbhJpDeezUxxiTMsYjw3dorZm/Ko1HvtyM0aB44aoTufjEDjLEtRA+JknBQyZMmMCkSZO47rrrju+NZQWOKwSA2K4Q6B+3XG7L3sa0X6axJ38PFyZfyD2D7iEmKMYj+84qKOP+T9fzw5ZDDEuJ5bmx/ekQFeyRfQshGkeSgoecfvrp7Nmz5/jeVHQE8tIccx/EpPjNHMor969k8s+TCTWH8vrI1xmWMMxj+1626QAPfLqBgjIrMy7szYRhneVBNCH8SItLCr8t2M7htEKP7jOuYxinXdndczvU
2nF3UdEhCAiHmM5g8I9/im92f8MD/3uAzhGdeXXkq7QLbeeR/RaUlvPIF5tZuCadPh0imHfViXRr658D+QnRmnm1JlJKnQf8BzACb2mtj5mjUil1JTAT0MA6rfXV3ozJ5+w2yN3reA4hJA4iE9wessLb3t/0Ps+tfo6T257Mi2e9SERAhEf2uzL1CHcvXEdmbgmTzuzKnWd3I8DkH59ZCFGV15KCUsoIvAyMAtKBVUqppVrrzS5lugH3A6dqrXOUUo2+D9GjZ/SeZrU450AogYgECG3jF3cY2bWd2atn88HmDxjVaRRPnvYkgcbGN2WVWW3M+X47b/yaSlJMCAtvHcrJnTzTLyGE8A5vXikMBnZqrVMBlFLzgIuBzS5lbgJe1lrnAGitD3kxHt+yFDk6lLXNMYdyUKSvIwKg3FbOg78/yDe7v+Hqnldz76B7PTKI3Zb9+dw1fy1bDxQwfnASD13Qi9BA/2giE0LUzpvX8AlAmsvrdOc6V92B7kqp35VSK5zNTcdQSt2slFqtlFqdlZXlpXAbZ/z48QwdOpRt27aRmJjI22+/fXRjSS4c3um4Kojr7jcJodBSyG0/3sY3u79hyoApTB88vdEJwWbXvP7LLi6e+zuHCy28M2EgT17WVxKCEM2EN/9Sa2oXqT5OtwnoBowAEoHflFInaK1zq7xJ6zeAN8AxdLbnQ228Tz755NiVWjvmPyjY7xyyIhk88ASwJ2QVZ3H7j7ezM2cnTwx/gotSLmr0PtOyi7l74Tr+3J3NuX3a8sSlfYkN8487qoQQ7vFmUkgHOrq8TgQyayizQmtdDuxWSm3DkSRWeTGupqHtkJsGJdkQFO14KM3gH52ru/N2c9sPt5Fdms3cs+dyasKpjdqf1ppFa9KZ9YWjZfC5sf25fECCPIgmRDPkzaSwCuimlOoCZADjgOp3Fn0OjAfeU0rF4WhOSvViTE3DZoWcVEc/Qng7CGvnFx3KAOuy1jHpx0kYlIF3z3uXPrF9GrW/I4Vl3P/pBr7bfJDBXWKYPbY/HWP8a0RXIYT7vJYUtNZWpdQkYBmOW1Lf0VpvUko9AqzWWi91bjtHKbUZsAH3aK2PNPB4/nFmWl4K2bsck+NEdXLMlOYj1WfVW562nHt+uYe9N4TrAAAgAElEQVT4kHheG/kaHSM61vJO9/y45SD3LV5PfomVB0f34obhXeRBNCGaOa/2/mmtvwa+rrZuhsuyBqY6fxosKCiII0eOEBsb69vEUJrvnANBQVw3CAj1WShaa44cOUJQkGMe48XbF/PIikfoHdObuWfPJTa44eMrFZVZeeyrzXzyZxq92kfw4Y396dnOM880CCF8q0XcEpKYmEh6ejo+vTOprBBKchwdyaFxkLPPd7E4BQUFkZCQwKtrX+WVda8wPGE4s8+YTYi54c07a/Zmc9f8daTlFHPrGSncNaobgSb/GN5bCNF4LSIpmM1munTp4puD223w3UOw4hXoOgqueAeC/OOs2Wq38tiKx1i8YzEXp1zMw8Mexmxo2N1PFqudF37Yzmu/7KJDVDDzbx7K4C7yIJoQLU2LSAo+U5oPi2+EHctgyG1wzmNg9I+vtMRawr2/3svytOXc1Pcm7jjpjgY3rW0/WMCUeWvZvD+fqwZ25N8X9SZMnjsQokWSv+yGyt0HH18FWdvggjkw6AZfR1QptzSXST9NYn3Weh4c8iDjeo5r0H7sds07v+/mmWXbCA808ca1J3NOH88MkCeE8E+SFBoibRXMG+8Yy+iaRZBylq8jqpRRmMGt399KZmEmc0bMYWSnkQ3az5HCMiZ9/Dd/pB5hZK+2PHV5X+LkQTQhWjxJCsdrwyL4/HaIaA8TvoI2PXwdUaVt2du47YfbKLWV8sY5b3By25MbtB+tNfcuWs+afTk8c3k/xg5M9I/bfYUQXufWI7ZKqcVKqQuU8pMxnn1Ba1j+FCy+ARIGwI0/+VVCWLl/JRO+nYBBGfjgvA8anBAAFqxO48eth7jv
vJ5cOaijJAQhWhF3K/lXcTyNvEMp9ZRSqqcXY/I/5aWODuXlT0L/8XDdEgj1j3mUwTExzq0/3Eq70HZ8OPpDukZ3bfC+0rKLeeSLzZySHMP1wzp7LkghRLPgVvOR1voH4AelVCSOYSm+V0qlAW8CHzrHLmqZCg/BvKshfRWc/TAMv8tvhqwAz06MY7drpi1ch1KK58b2l6eThWiF3O5TUErFAtcA1wJ/Ax8Bw4F/4hjltOU5uNlxh1FRFlz5AfS+2NcRVfLGxDjv/L6blbuzeebyfiRGy/hFQrRGbiUFpdSnQE/gv8BFWuv9zk3zlVKrvRWcT23/DhZNdAxVcf3Xjn4EP+GNiXF2HirgmWXbGNkrnrEDEz0UqRCiuXH3SmGu1vqnmjZorQd6MB7f0xpWvg7L7oe2fWD8fMc8yn6i0FLIlOVTWLl/JVMGTGHiCRMb3RFcbrMzdcE6QgOMPHFZX+lYFqIVc7ejuZdSKqrihVIqWil1u5di8h1bOXx1N3x7H3Q/H67/1q8SQlZxFtcvu541B9bwxPAnuKHvDR6pwF/5eRfr0/N4/NK+xIcHeSBSIURz5W5SuMl1NjTnnMo3eSckHynJhY/Gwuq34dTJcNWHEBjm66gq7c7bzbXfXMve/L3MPXuuR2ZKA9iQnsdLP+3gkhM7MLpve4/sUwjRfLnbfGRQSinnUNcopYxAgPfCamLZux0dytm7YMxLMOA6X0dURZWJcc59lz5xjZsYp0JpuY2pC9YSFxbIrDEneGSfQojmzd2ksAxYoJR6Dcc8y7cC33otqqa09w/HLafaDtd+Dl1O83VEVfyS9gvTfplGm5A2vD7y9UZPjONq9nfb2HGokPcnDiYyxD/mjhZC+Ja7SeE+4BbgNkAB3wFveSuoJrP2E/jiTojsCP9YCLEpvo6oioqJcXrF9OLls19u1MQ41a1IPcJb/9vNNackcUb3Nh7brxCieXP34TU7jqeaX/VuOE3EboefH4PfZkPn0xzPIPhw2szqtNa8tu41j02MU11hmZVpC9eRFBPCA6N7eWy/Qojmz93nFLoBTwK9gcrbU7TWyV6Ky3ssxfDZLbBlqaPv4II5jtnS/IQnJ8apzeNfbSYjt4SFtwwlJEDGRBRCHOVujfAu8DDwPHAmcD2OZqTmpeAAfDIOMtc6JsQZOsmvhqzw5MQ4tflp60E++TONW85IZmBn/7k6EkL4B3eTQrDW+kfnHUh7gZlKqd9wJIrmYf96R0IoyYVxH0PP0b6OqApPTYxTl5wiC/ct3kCPtuFMHdXd4/sXQjR/7iaFUuew2TuUUpOADCDee2F5Qfqfjt8Tv4X2/XwbSzWemhinPv9espHcYgvvXT+IQFPjhsUQQrRM7iaFKUAIcCfwKI4mpH96KyivGHQj9B0LQZG+jqQKT02MU5+l6zL5cv1+pp3TnT4d/Os7EEL4j3qTgvNBtSu11vcAhTj6E5onP0sIK/evZMrPUwg1h/LBeR80ah6EuhzML+Xfn2/kxI5R3HqGf912K4TwL/UOc6G1tgEnKxklzaM8OTFOXbTW3Ld4PWVWG3Ou7I/J2HonzxNC1M/d5qO/gSVKqYVAUcVKrfWnXomqhftg0wc8u/pZj0yMU595q9JYvi2LmRf1JrmN/4zlJITwT+4mhRjgCHCWyzoNSFI4Dt6YGKcu+44U8+iXmzm1ayzXDe3steMIIVoOd59obr79CH7CdWKc8T3Hc9+g+xo9MU5dbM6pNY1K8ewVMrWmEMI97j7R/C6OK4MqtNYTPR5RC+SNiXHq887/dvPnnmyeG9ufDlHBXj2WEKLlcLfX8UvgK+fPj0AEjjuR6qSUOk8ptU0ptVMpNb2G7ROUUllKqbXOnxuPJ/jmwFsT49Rl+8ECnl22jXN6t+XyAf4zSZAQwv+523y02PW1UuoT4Ie63uO8lfVlYBSQDqxSSi3VWm+uVnS+1nqS+yE3H7vzdnPbD7eRXZrN3LPncmrCqV4/pmNqzbWEB5lk
ak0hxHFr6P2J3YCkesoMBnZqrVO11hZgHnBxA4/X7KzLWsd131xHibWEd899t0kSAsBLP+1kY0Y+j1/al7gw73ViCyFaJreSglKqQCmVX/EDfIFjjoW6JABpLq/Tneuqu1wptV4ptUgpVeMMMkqpm5VSq5VSq7OystwJ2ad+SfuFG5fdSHhAOB+e/6HHZkqrz7q0XF7+eSeXnZTAeSe0a5JjCiFaFreSgtY6XGsd4fLTvXqTUg1qareo3ln9BdBZa90PR3PU+7Uc/w2t9UCt9cA2bfx7QpjF2xdz5893khKVwn/P/69HZ0qrS8XUmvHhgTw8pmmSkBCi5XH3SuFSpVSky+sopdQl9bwtHXCtEROBTNcCWusjWusy58s3Ae8M/NMEtNa8uu5VZv4xk6EdhvLOue94dKa0+jzz7TZ2ZRXx7BX9iQz2n/khhBDNi7t9Cg9rrfMqXmitc6l/2OxVQDelVBelVAAwDljqWkAp1d7l5Rhgi5vx+BWr3cqsP2bxytpXGJMyhpfOesmjM6XV5/92Head33dz3dBODO8W12THFUK0PO4+0VxT8qjzvVprq3OY7WWAEXhHa71JKfUIsFprvRS4Uyk1BrAC2cAEtyP3E00xMU5dCkrLuWfherrEhTL9/J5NdlwhRMvkblJYrZSag+MWUw3cAayp701a66+Br6utm+GyfD9wv9vR+hnXiXEeGPIA43uOb/IYHv1yM/vzSlh02zCZWlMI0WjuNh/dAViA+cACoAT4l7eCag4yCjO49ptr2XJkC3NGzPFJQvhh80EWrE7n1jNSGJAU3eTHF0K0PO4+vFYEHPNEcmvVVBPj1CW7yML0TzfQq30EU0bK1JpCCM9w9+6j75VSUS6vo5VSy7wXlv9auX8lE76dgEEZ+OC8D3ySELTWPPT5BvJKLMy5sj8BJpkjQQjhGe7WJnHOO44A0Frn0NzmaPaAppoYpz5L12Xy9YYD3DWqO73ae28uBiFE6+NuUrArpSqHtVBKdaaGUVNbsg82fcC9v95L/zb9ef/892kX6psnhg/kOabWPLlTNLecLlNrCiE8y93bVR4E/qeU+sX5+nTgZu+E5F/s2s6c1XN4f/P7TTIxTl201ty7eD3lNs3ssf0xyhwJQggPc7ej+Vul1EAciWAtsATHHUgtWlNPjFOfj1bu49ftWTx6cR86x4X6LA4hRMvl7iQ7NwKTcQxVsRY4BfiDqtNztiiuE+NMHjCZG07w/jwIddlzuIjHv9rCad3iuOaUTj6LQwjRsrnbfDQZGASs0FqfqZTqCczyXli+lVWcxe0/3s7OnJ08PvxxxqSM8Wk8FVNrmoyKZ67oJ3MkiGZLa409Px9rdja23Fyw20Hrio2VZRwLVFlfucJj5au9D+1+Wa/FRI3bKzYEn3QSgcnJeJO7SaFUa12qlEIpFai13qqU6uHVyHzEdWKcl85+ieEJw30dEm/+lsrqvTk8f1V/2kfK1JrCf2ibDVtuLrbsbKzZOdhycrDlZDsq/eyK5RzH9pxsbDm5YLX6Ouxmq93Mh/0mKaQ7n1P4HPheKZVDtRFPW4J1WeuY9OMkDMrAu+e+22TzINRl64F85ny3nfP6tOOSE2VqTeFd2mLBmpOLLSf7aEVfUaHXsGzLy3M5m63KEBGBKToaY0wM5o4dCe7fD2N0DMaYaEwxMRijolAmZxVUcfVbeRVc7XXly+rra3vf8Zav4X0eP4ab5au97+hLhSGicrBqr3G3o/lS5+JMpdTPQCTwrdei8oFf0n5h2i/TaBPShtdHvt5k8yDUxWK1c9f8dUQEm3j80hOk2UgcN3tpqctZfP0Vvb2goOYdKYUxKgpjTAym6GgCu3Y9Wrm7VvQxMRijozFFR6PMMoR7c3TcI6hprX+pv1Tzsnj7Yh5Z8Qi9Ynrx8tkvN+k8CHV58ccdbNmfz5vXDSRWptZs9bTW2IuKXSr3WppoKpdz0MXFNe/MZKo8izfGRBPc4YTK5cqKPjrqaEUf
GYky+u7OO9F0WvWwmlprXlv/Gq+sfYVTE05lzhlzmnQehLr8tS+HV5bv5IqTExnVu62vwxFeoO12Z6erSzt8Tk6dFb22WGrclwoMrDyLN8bEENClM6bomGMqelOMY7shPFyuPEWNWm1SsNqtPL7ycRZtX8SYlDHMHDYTs8E/LndLLDamLVhH+8hgZlzU29fhCDcd2+nqcjZfw1m8LScHbLYa92UICXFW6DGY28QT1KNnrc01puhoVEiIVPLCI1plUvD1xDj1efrbraQeLuLjm4YQEeQfiUrUTmtN9vvvk/X8C+iyshrLVOl0TUoi+MT+VSv3ahW9IVCaC4VvtLqk4A8T49Tl952Hee//9jBhWGeGpcjUmv7OVljE/gcfpGDZMsLOOIPQ006rbKKpbK6JipJOV9FstKqkkFGYwa3f30pmYSazR8xmVKdRvg6pivzScu5ZuI7kNqHcd55MrenvynbsIP3OyVj27iX+nmnETJzoV1ecQjREq0kK/jAxTn1mLd3MwYIyFt82jOAAudPDn+V9+RX7//1vDKGhJL37LqFDBvs6JCE8otUkhbWH1lZOjOOreRDqsmzTARb/lc4dZ3XlxI5R9b9B+IS2WDj4zLPkfPghwQMGkPD885jbtrqpRUQL1mqSwlU9r2J08mjCA8J9HcoxDheW8cCnG+jTIYI7zurm63BELcoPHCBjyl2UrF1LzD+vI37aNOkrEC1Oq0kKgF8mBK01D362gYJSKx/fdKJMremnilasIGPq3ejSUhKen0PE+ef7OiQhvEJqIB/77O8Mlm06yN3ndKdHO/9LWq2dtts5/Mab7Jt4A8boaDovXCAJQbRorepKwd9k5pbw8JJNDOoczY2neXfkQ3H8bPn5ZE6/n8KffiJi9Pm0f/RRDKEyuZFo2SQp+Ijdrrl30XpsWvOcTK3pd0q3biX9zsmUZ2bS9oEHiL72GrndVLQK0nzkIx+u3Mv/dh7mwQt60SlWzj79Se5nn7PnqnHo0lI6ffA+MdddKwlBtBpypeADqVmFPPH1Fs7o3oarByf5OhzhZC8r4+DjT5C7YAEhQ4aQMPs5THHyVLloXSQpNDGrzc7dC9cRaDLy9OUytaa/sKRnkDFlCqUbNxJ70420mTz56AQwQrQi8r++ib3+ayp/78vlP+NOpF1kkK/DEUDhb7+ROe0etM1G4tyXCB850tchCeEzXu1TUEqdp5TappTaqZSaXke5K5RSWik10Jvx+NrmzHxe+GE7F/Rtz5j+HXwdTqun7Xay5r5M2s23YGrXji6LFkpCEK2e164UlFJG4GVgFJAOrFJKLdVab65WLhy4E1jprVj8QZnVxtQFa4kKCeDRS2RqTV+z5uSQee99FP32G5EXj6HdzJkYgoN9HZYQPufNK4XBwE6tdarW2gLMAy6uodyjwDNAqRdj8bkXftjB1gMFPH15X2JCA3wdTqtWsmEjey6/guIVK2g382HaP/WUJAQhnLyZFBKANJfX6c51lZRSJwEdtdZf1rUjpdTNSqnVSqnVWVlZno/Uy9bszeb1X3Zx1cCOnNVTptb0Fa01OfMXsPfqq9FoOn30IdHjxslVmxAuvNnRXNNfmq7cqJQBeB6YUN+OtNZvAG8ADBw4UNdT3K8UW6xMdU6t+dCFvXwdTqtlLynhwCOPkvfZZ4SeeiodnnsWU3S0r8MSwu94MymkAx1dXicCmS6vw4ETgOXOM7V2wFKl1Bit9WovxtWknvx6K/uyi/nkplMIl6k1fcKybx/pd06mbOtW4m6/nbh/3Y4yynwVQtTEm0lhFdBNKdUFyADGAVdXbNRa5wGVTwYppZYD01pSQvh1exb/XbGXG4Z34ZTkWF+H0yoV/PQTmfdNB4OBjq+/RtgZZ/g6JCH8mtf6FLTWVmASsAzYAizQWm9SSj2ilBrjreP6i7zicu5dtJ6u8WHcc24PX4fT6mirlUNznif99n8R0LEjXRYvkoQghBu8+vCa1vpr4Otq62bUUnaE
N2NpajO/2ERWYRlvXHcyQWZpqmhK1iNHyLh7GsUrVhA1dixtH3oQQ2Cgr8MSolmQJ5q94JsN+/ns7wwmn92NfokytWZTKv77bzKm3IUtN5f2jz9O1OWX+TokIZoVSQoellVQxgOfbaBvQiSTzvK/uaBbKq01OR9+xMGnn8bcrh2dP/mYoN69fR2WEM2OJAUP0lpz/6cbKLLYmHNlf8xGGZm8KdiLitg/42Hyv/qKsBEj6PD0UxgjI30dlhDNkiQFD1q0Jp0fthzkoQt60a2tTK3ZFMpSU0m/804sqbtpM2UKsTffhDJIMhaioSQpeEh6TjGPfLGZwV1imHhqF1+H0yrkf7uM/Q88gAoMJOmtNwkdNszXIQnR7ElS8AC7XXPPwvXYtWb22P4YZGpNr9Ll5Rya8zzZ775LUP9+JL7wAub27X0dlhAtgiQFD3j/jz38kXqEpy7rS8eYEF+H06KVHzpExtSplKxeQ/TVV9N2+n2oABlgUAhPkaTQSDsPFfLUN1s5q2c8Vw3qWP8bRIMVr1pF+tSp2AuL6PDsM0RedJGvQxKixZGk0AgVU2sGBxh56rK+Mtqml2ityX73PQ7Nnk1Ax44kvf02Qd27+zosIVokSQqN8OryXaxLy2Xu1ScRHyFTa3qDrbCQ/Q88SMF33xE+ahTtn3wCY1iYr8MSosWSpNBAGzPy+M+PO7iofwcu7CdTa3pD6fbtZNw5GUtaGvH33EPMxOvlakwIL5Ok0ACl5Y6pNWNCA3j04j6+DqdFyvviS/bPmIEhNJSkd98hdPBgX4ckRKsgSaEBnv9+O9sPFvLu9YOICpE7XzxJWywcfPoZcj76iOCTTybh+TmY4+N9HZYQrYYkheO0ak82b/yWyvjBSZzZQyorTyrfv5+MKXdRsm4dMRMmEH/3VJRZJiYSoilJUjgORWVW7l6wjsToYB68QKbW9KSiP/4gY+rd6LIyEl54nojzzvN1SEK0SjJIzHF4/OstpOUUM3vsiYQFSj71BG23c/i119l3w40YY2PovGihJAQhfEhqNjct33aIj1fu4+bTkxncJcbX4bQItrw8MqffT+HPPxMxejTtH30EQ2ior8MSolWTpOCG3GIL9y1eT/e2YUwdJQ9NeULpli2k3zmZ8v37afvgg0Rf8w+53VQIPyBJwQ0zlmziSKGFt/85SKbW9IDcTz/jwKxZGKOi6PTBB4QMOMnXIQkhnCQp1OOr9ftZui6TqaO6c0KCTNzSGPayMg4+9ji5CxcSMmQICXNmY4qN9XVYQggXkhTqcCi/lIc+30D/xEhuH5Hi63CaNUt6BhmTJ1O6aROxN99MmzvvQJnkv58Q/kb+KmuhtWb6pxsottiYfeWJmGRqzQYr/PVXMu65F+x2El+eS/jZZ/s6JCFELSQp1GLB6jR+2nqIGRf2pmu8DMDWENpm4/DLr3D41VcJ7N6dxBf/Q0CnTr4OSwhRB0kKNUjLdkytOTQ5lgnDOvs6nGbJmpND5j33UvS//xF58cW0m/kwhuBgX4clhKiHJIVq7HbNtIXrUErx7Nh+MrVmA5Rs2ED65MnYsg7TbtYsoq4cK7ebCtFMSEN5Ne/8vpuVu7OZcVFvEqNlas3jobUmZ9589l79DwA6ffwR0VddKQlBiGZErhRc7DhYwDPLtjGyVzxjT070dTjNir2khAMzZ5G3ZAmhw4fT4dlnMEVH+zosIcRxkqTgVG6zM3XBOkIDjDwhU2seF8vevaTfOZmy7duJ+9e/iLv9NpRRHvITojmSpOD08s872ZCRx6v/GEB8uEyt6a6CH38k877pKKORjq+/Rtjpp/s6JCFEI3i1T0EpdZ5SaptSaqdSanoN229VSm1QSq1VSv1PKdXbm/HUZkN6HnN/2sklJ3bg/L7tfRFCs6OtVg7Nnk36vyYR0KkTnRcvloQgRAvgtaSglDICLwPnA72B8TVU+h9rrftqrU8EngHmeCue2pSW27hrwVriwgKZNeaEpj58s2Q9fJh9
N9zIkTffIurKK+n08UcEJCb4OiwhhAd4s/loMLBTa50KoJSaB1wMbK4ooLXOdykfCmgvxlOj55ZtY+ehQj6YOJjIEJnlqz7Ff/1NxpQp2PLyaP/EE0RddqmvQxJCeJA3k0ICkObyOh0YUr2QUupfwFQgADirph0ppW4GbgZISkryWIArUo/w9u+7ueaUJE7v3sZj+22JtNbk/PdDDj7zDOb27ek87xOCesnsc0K0NN7sU6jp9p1jrgS01i9rrVOA+4CHatqR1voNrfVArfXANm08U3kXllmZtnAdSTEhPDBaKre62IuKyLx7GgefeIKw006jy+JFkhCEaKG8eaWQDnR0eZ0IZNZRfh7wqhfjqeKxLzeTkVvCwluGEhIgN2HVpiw1lfQ77sSyezdt7rqL2JtuRBnkmUchWipv/nWvAroppboopQKAccBS1wJKqW4uLy8Adngxnko/bT3IvFVp3HJ6CgM7y9Satcn/9lv2XDEWW04OSW+/RdwtN0tCEKKF89opstbaqpSaBCwDjMA7WutNSqlHgNVa66XAJKXUSKAcyAH+6a14KuQUWbhv8QZ6tgvnrlHd6n9DC6e1xnroEJZduyjblUpZ6i4su1IpS03Fdvgwwf37k/CfFzC3a+frUIUQTcCr7SZa66+Br6utm+GyPNmbx6/JQ0s2klts4b3rBxFoaj1P3WqbjfK0NMpSUynbdbTit6SmYi8srCxniIggMDmZsDNOJ6h3b6LHjkUFBPgwciFEU2pVjelL12Xy1fr93HNuD/p0aJlTa9rLyrDs3l214t+1C8uePejy8spypjZtCEhJIXLMGAK6phCYnEJgSjLGuDgZ4kOIVqzVJIWD+aX8+/ONnJQUxS2nJ/s6nEazFRQc2+Szaxfl6emgnTd5GQyYExMJTE4m9PTTKiv+gORkjBERvv0AQgi/1GqSwkcr91FmtTF7bP9mM7Wm1hprVhaW6k0+u3ZhzcqqLKfMZgK6dCHohD5EjhnjqPhTUgjo3BlDYKAPP4EQorlpNUnhrpHdGN23Hclt/G9qTW2zUZ6R4aj4U1Mp2+Wo+MtSU7EXFFSWM4SGEpCSQuippxKQkkxgSgqBycmYExNRplbzTymE8KJWU5MopejZzrdNJnaLBcuePY6Kf+cuLKmO5h/Lnj3osrLKcsa4OAKTk4m48IKjTT4pKZji46W9XwjhVa0mKTQlW2Hh0TP+iop/1y4saWlgtzsKKYW5QwcCUpIJHTbM2dafQmByF4xRUb79AEKIVkuSQgNprbEdOVK14nf+th48eLSg2UxApyQCe/QgfPT5R8/8u3SRiexFk9JaozVou0bbNXbnb213zE2utcZuc67TLuurldXa+dqmsWuX9S7l7K77sFUsV2yj8hji+CT2jCYuMdyrx5CkUA9tt1OemVnjw132vLzKciokxHGXzylDHGf8zjP/gI6JKLOMvtqc2O0aq8WGrdyOtdyO1WLDWm53vHYuWy12bOXO5Ypt5fajFWIdFaa2V2yrrcK0V6k4XStS131oDXabvWpFXVmGqseyaaQObv7OuLqHJIWmoi0WLPv2VTnjL0vdhSV1N7q0tLKcMTqagJRkIs4992iTT9cUTO3aSXu/F2itsVkrKmE71nIbVktFRXx0uWK9rbymbXZslqMVeE0VvmsZu60RtacCg0GhnD8GxdHlyvXOMkphMDrXKZf1LmUNRoUyGaqUMxiqlq1yLKOhyjEd+6oWQ+WxDCjntqrHpdqxqq2vUh7n/pyxqmqf0fVzqxri9tx/lVbBaPb+nZOtLinYi4ooS91dteLflYpl3z6w2SrLmTq0JzA5hdBBg46e+aektPrJ6O02e+WZsrXcWbm6VMy2Gitp55l2bdtqqeRtFjtWq73Bs2wog8JkNmAKMGA0GzCZjZgCDJjMBoxmIyEhJsd2sxGjc31FGdfyRpPB+T6j470uyxXrjQEGTCYDyiDVnGjeWk1SWP2fpWxcWwJWq8vaeJQ5AdVpJKpb
ACrAjAoIQJkDoGLgNwuwFdhqA7Y3feC+pHVl5V9RqdvtDT+LdlS0zko5wFi5bAowEhRiwmgOwOSyvp44P4EAAAfTSURBVLJMRcVbfdnl/UaX5YqK29hMnkcRwp+0mqQQHBtGZEgBhrAwDKGhGMNCMYSEHK38RQ1U5Zl1rWfTrmfQtZxpV2yT5jUh/F+rSQp9rjmLPtf4OgohhPBvcposhBCikiQFIYQQlSQpCCGEqCRJQQghRCVJCkIIISpJUhBCCFFJkoIQQohKkhSEEEJUUs1t+FqlVBawt4FvjwMOezAcT5G4jo/Edfz8NTaJ6/g0Jq5OWus29RVqdkmhMZRSq7XWA30dR3US1/GRuI6fv8YmcR2fpohLmo+EEEJUkqQghBCiUmtLCm/4OoBaSFzHR+I6fv4am8R1fLweV6vqUxBCCFG31nalIIQQog4tOikopZ5VSm1VSq1XSn2mlIqqpdx5SqltSqmdSqnpTRDXWKXUJqWUXSlV650ESqk9SqkNSqm1SqnVfhRXU39fMUqp75VSO5y/a5wTVSllc35Xa5VSS70YT52fXykVqJSa79y+UinV2VuxHGdcE5RSWS7f0Y1NFNc7SqlDSqmNtWxXSqkXnXGvV0oN8JO4Riil8ly+rxlNEFNHpdTPSqktzr/FyTWU8e73pbVusT/AOYDJufw08HQNZYzALiAZCADWAb29HFcvoAewHBhYR7k9QFwTfl/1xuWj7+sZYLpzeXpN/47ObYVN8B3V+/mB24HXnMvjgPl+EtcEYG5T/X9yOe7pwABgYy3bRwPfAAo4BVjpJ3GNAL5s4u+qPTDAuRyOYw7g6v+OXv2+WvSVgtb6O611xaTMK4DEGooNBnZqrVO11hZgHnCxl+PaorXe5s1jNISbcTX59+Xc//vO5feBS7x8vLq48/ld410EnK28PxepL/5d3KK1/hXIrqPIxcAH2mEFEKWUau8HcTU5rfV+rfVfzuUCYAuQUK2YV7+vFp0UqpmII7tWlwCkubxO59h/BF/RwHdKqTVKqZt9HYyTL76vtlrr/eD4owHiaykXpJRarZRaoZTyVuJw5/NXlnGelOQBsV6K53jiArjc2eSwSCnV0csxucuf/waHKqXWKaW+UUr1acoDO5sdTwJWVtvk1e+r2c/RrJT6AWhXw6YHtdZLnGUeBKzARzXtooZ1jb4ly5243HCq1jpTKRUPfK+U2uo8u/FlXE3+fR3HbpKc31cy8JNSaoPWeldjY6vGnc/vle+oHu4c8wvgE611mVLqVhxXM2d5OS53+OL7csdfOIaGKFRKjQY+B7o1xYGVUmHAYmCK1jq/+uYa3uKx76vZJwWt9ci6tiul/glcCJytnQ1y1aQDrmdMiUCmt+Nycx+Zzt+HlFKf4WgiaFRS8EBcTf59KaUOKqXaa633Oy+TD9Wyj4rvK1UptRzHWZank4I7n7+iTLpSygRE4v1minrj0lofcXn5Jo5+Nn/glf9TjeVaGWutv1ZKvaKUitNae3VMJKWUGUdC+Ehr/WkNRbz6fbXo5iOl1HnAffx/e/cPItUVxXH8+1NYE2IgcTfkT5U/LggipkhicLdJa2FIELYIxEILC+tAQJDEIoWQLsWCSZvCPxHRhRQasqQImsJ112wRTWGjIhZCiFHRk+Lc9xh15zFZnTcT/X1g2Mfse7PnXWbnzLv33XNha0T83WW3M8C4pDckjZADg327c6VXkp6T9Hy1TQ6aL3mXRMsG0V7HgO1lezvw0BWNpBclrSrbY8AE8HsfYunl/Dvj3Qac6vKFpNW4Huh33kr2Vw+DY8Cn5a6a94EbVXfhIEl6pRoLkvQe+Xl5vfmoR/6bAr4FFiPi6y679be92hxZb/sBXCD73s6WR3VHyGvATMd+W8hR/otkN0q/4/qIzPa3gKvAjw/GRd5FMlce54clrgG11yhwEvij/FxTnn8HOFC2NwPzpb3mgR19jOeh8we+JL98ADwDHCzvv9PAm/1uox7j+qq8
l+aAn4B1LcX1PXAZuFPeXzuAXcCu8nsB35S452m4I6/luHZ3tNevwOYWYpoku4LOdXxubWmzvTyj2czMak9095GZmf03TgpmZlZzUjAzs5qTgpmZ1ZwUzMys5qRgtgRJfz3i8YfKzGokrZY0LeliqXw5K2mTpJGy/b+fRGpPDicFs8es1MhZGRF/lqcOkDOaxyNiPVmtdCyycN1JYGoggZotwUnBrEGZNbpf0oJybYup8vyKUvbgvKTjkmYkbSuHfUKZdS3pLWATsCci7kGW4YiIE2Xfo2V/s6Hgy1azZh8DbwMbgTHgjKRZsozG68AGsmrrIvBdOWaCnC0LsB44GxF3u7z+AvBuXyI3WwZfKZg1myQri96NiKvAz+SH+CRwMCLuRcQVsmxE5VXgWi8vXpLF7arOldmgOSmYNeu2OE7Tojk3yfpHkLVzNkpq+l9bBfyzjNjMHjsnBbNms8CUpJWSXiKXcDwN/EIuWLNC0svk0o2VRWAtQOR6Dr8BX3RU3ByX9GHZHgWuRcSdtk7IrImTglmzH8iKlXPAKeCz0l10mKysuQBMk6tj3SjHnOD+JLGTXEDogqR5ci2Dqv79B8BMf0/BrHeukmq2TJJWR67KNUpePUxExBVJz5JjDBMNA8zVaxwBPo8hXLPbnk6++8hs+Y5LegEYAfaVKwgi4qakveS6uZe6HVwWwznqhGDDxFcKZmZW85iCmZnVnBTMzKzmpGBmZjUnBTMzqzkpmJlZzUnBzMxq/wKtZKSleSus2AAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "# Plot the cross-validated accuracy over log10(C), one curve per gamma.\n",
    "test_means = grid.cv_results_['mean_test_score']\n",
    "test_stds = grid.cv_results_['std_test_score']\n",
    "\n",
    "# Reshape the flat CV results into an (n_Cs, n_gammas) grid.\n",
    "# NOTE(review): assumes cv_results_ varies gamma fastest within each C -- confirm param_grid order.\n",
    "n_Cs = len(Cs)\n",
    "number_gamms = len(gammas)\n",
    "\n",
    "test_scores = np.array(test_means).reshape(n_Cs, number_gamms)\n",
    "test_stds = np.array(test_stds).reshape(n_Cs, number_gamms)\n",
    "\n",
    "x_axis = np.log10(Cs)\n",
    "for i, gamma in enumerate(gammas):\n",
    "    # errorbar shows the CV std as well -- the original computed test_stds but never displayed it\n",
    "    plt.errorbar(x_axis, test_scores[:, i], yerr=test_stds[:, i], label=gamma)\n",
    "\n",
    "plt.legend()\n",
    "plt.xlabel('log(C)')\n",
    "plt.ylabel('accuracy')\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.5"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
