{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 机器学习建模"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "# 导包\n",
    "import pandas as pd\n",
    "import numpy as np\n",
    "# 预处理\n",
    "from sklearn.preprocessing import MinMaxScaler,StandardScaler\n",
    "from sklearn.preprocessing import LabelEncoder,OneHotEncoder\n",
    "from sklearn.preprocessing import Normalizer\n",
    "from sklearn.discriminant_analysis import LinearDiscriminantAnalysis\n",
    "from sklearn.decomposition import PCA\n",
    "# 建模\n",
    "from sklearn.model_selection import train_test_split\n",
    "\n",
    "# 把Graphviz加入path环境变量\n",
    "# import os\n",
    "# os.environ['PATH'] += os.pathsep + 'C:\\Program Files (x86)\\Graphviz2.38\\bin'"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(       satisfaction_level  last_evaluation  number_project  \\\n",
       " 0                0.318681         0.265625             0.0   \n",
       " 1                0.780220         0.781250             0.6   \n",
       " 2                0.021978         0.812500             1.0   \n",
       " 3                0.692308         0.796875             0.6   \n",
       " 4                0.307692         0.250000             0.0   \n",
       " ...                   ...              ...             ...   \n",
       " 14994            0.340659         0.328125             0.0   \n",
       " 14995            0.307692         0.187500             0.0   \n",
       " 14996            0.307692         0.265625             0.0   \n",
       " 14997            0.021978         0.937500             0.8   \n",
       " 14998            0.307692         0.250000             0.0   \n",
       " \n",
       "        average_monthly_hours  time_spend_company  Work_accident  \\\n",
       " 0                   0.285047               0.125            0.0   \n",
       " 1                   0.775701               0.500            0.0   \n",
       " 2                   0.822430               0.250            0.0   \n",
       " 3                   0.593458               0.375            0.0   \n",
       " 4                   0.294393               0.125            0.0   \n",
       " ...                      ...                 ...            ...   \n",
       " 14994               0.257009               0.125            0.0   \n",
       " 14995               0.299065               0.125            0.0   \n",
       " 14996               0.219626               0.125            0.0   \n",
       " 14997               0.859813               0.250            0.0   \n",
       " 14998               0.289720               0.125            0.0   \n",
       " \n",
       "        promotion_last_5years  department  salary  \n",
       " 0                        0.0    0.777778     0.0  \n",
       " 1                        0.0    0.777778     0.5  \n",
       " 2                        0.0    0.777778     0.5  \n",
       " 3                        0.0    0.777778     0.0  \n",
       " 4                        0.0    0.777778     0.0  \n",
       " ...                      ...         ...     ...  \n",
       " 14994                    0.0    0.888889     0.0  \n",
       " 14995                    0.0    0.888889     0.0  \n",
       " 14996                    0.0    0.888889     0.0  \n",
       " 14997                    0.0    0.888889     0.0  \n",
       " 14998                    0.0    0.888889     0.0  \n",
       " \n",
       " [14999 rows x 9 columns], 0        1\n",
       " 1        1\n",
       " 2        1\n",
       " 3        1\n",
       " 4        1\n",
       "         ..\n",
       " 14994    1\n",
       " 14995    1\n",
       " 14996    1\n",
       " 14997    1\n",
       " 14998    1\n",
       " Name: left, Length: 14999, dtype: int64)"
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# 总的预处理函数\n",
    "def hr_preprocessing(sl=False,le=False,npr=False,amh=False,tsc=False,wa=False,pl5=False,slr=False,dp=False,lower_d=False,ld_n=1):\n",
    "    # 读数据\n",
    "    df = pd.read_csv('./data/HR.csv')\n",
    "    # 1.清洗数据\n",
    "    df = df.dropna(subset=['satisfaction_level'])\n",
    "    df = df[df['last_evaluation']<=1][df['salary']!='nme']\n",
    "    # 2.得到标注\n",
    "    label = df['left']\n",
    "    df = df.drop('left',axis=1)\n",
    "    # 3.特征选择，特征较少，先不删除特征\n",
    "    # 4.特征处理\n",
    "    # 连续属性\n",
    "    scaler_lst = [sl,le,npr,amh,tsc,wa,pl5]\n",
    "    column_lst = ['satisfaction_level','last_evaluation','number_project','average_monthly_hours',\n",
    "                 'time_spend_company','Work_accident','promotion_last_5years']\n",
    "    for i in range(len(scaler_lst)):\n",
    "        if not scaler_lst[i]:\n",
    "            df[column_lst[i]] = MinMaxScaler().fit_transform(df[column_lst[i]].values.reshape(-1,1))\n",
    "        else:\n",
    "            df[column_lst[i]] = StandardScaler().fit_transform(df[column_lst[i]].values.reshape(-1,1))\n",
    "    # 离散属性\n",
    "    # 数值化重写map函数，把salary对应到我们想要的数值\n",
    "    def map_salary(s):\n",
    "        d = dict([('low',0),('medium',1),('high',2)])\n",
    "        return d.get(s,0)\n",
    "    scaler_lst = [slr,dp]\n",
    "    column_lst = ['salary','department']\n",
    "    for i in range(len(scaler_lst)):\n",
    "        if not scaler_lst[i]:\n",
    "            if column_lst[i] == 'salary':\n",
    "                df[column_lst[i]] = [map_salary(s) for s in df['salary'].values]\n",
    "            else:\n",
    "                df[column_lst[i]] = LabelEncoder().fit_transform(df[column_lst[i]])\n",
    "            # 归一化处理\n",
    "            df[column_lst[i]] = MinMaxScaler().fit_transform(df[column_lst[i]].values.reshape(-1, 1))\n",
    "        else:\n",
    "            df = pd.get_dummies(df,columns=[column_lst[i]])\n",
    "    # 5.降维\n",
    "    if lower_d:\n",
    "        # 因为标注只有两类，LDA降维只剩1类，所以不使用LDA，使用PCA\n",
    "        # return LinearDiscriminantAnalysis(n_components=ld_n)\n",
    "        return PCA(n_components=ld_n).fit_transform(df.values),label\n",
    "    return df,label\n",
    "\n",
    "features,label = hr_preprocessing()\n",
    "features,label"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 数据集切分为训练集、测试集、验证集"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "8999 3000 3000\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "(array([[0.87912088, 0.3125    , 0.2       , ..., 0.        , 0.88888889,\n",
       "         0.5       ],\n",
       "        [0.51648352, 0.890625  , 0.4       , ..., 0.        , 0.11111111,\n",
       "         0.        ],\n",
       "        [0.34065934, 0.15625   , 0.        , ..., 0.        , 0.55555556,\n",
       "         0.        ],\n",
       "        ...,\n",
       "        [0.95604396, 0.8125    , 0.        , ..., 0.        , 0.        ,\n",
       "         0.        ],\n",
       "        [0.79120879, 0.3125    , 0.4       , ..., 0.        , 0.11111111,\n",
       "         0.5       ],\n",
       "        [0.31868132, 0.296875  , 0.        , ..., 0.        , 0.77777778,\n",
       "         0.        ]]),\n",
       " array([0, 0, 1, ..., 0, 0, 1], dtype=int64),\n",
       " array([[0.28571429, 0.25      , 0.6       , ..., 0.        , 0.33333333,\n",
       "         0.        ],\n",
       "        [0.69230769, 0.875     , 0.2       , ..., 0.        , 0.77777778,\n",
       "         0.5       ],\n",
       "        [0.71428571, 0.796875  , 0.6       , ..., 0.        , 0.22222222,\n",
       "         1.        ],\n",
       "        ...,\n",
       "        [0.6043956 , 0.609375  , 0.4       , ..., 0.        , 0.88888889,\n",
       "         0.5       ],\n",
       "        [0.40659341, 0.28125   , 0.        , ..., 0.        , 0.77777778,\n",
       "         0.5       ],\n",
       "        [0.76923077, 0.796875  , 0.4       , ..., 0.        , 0.77777778,\n",
       "         0.5       ]]),\n",
       " array([0, 0, 0, ..., 0, 1, 0], dtype=int64),\n",
       " array([[0.87912088, 0.84375   , 0.4       , ..., 0.        , 0.88888889,\n",
       "         0.5       ],\n",
       "        [0.36263736, 0.15625   , 0.        , ..., 0.        , 1.        ,\n",
       "         0.        ],\n",
       "        [0.43956044, 0.859375  , 0.4       , ..., 0.        , 0.22222222,\n",
       "         0.        ],\n",
       "        ...,\n",
       "        [0.86813187, 0.546875  , 0.6       , ..., 0.        , 0.88888889,\n",
       "         0.5       ],\n",
       "        [0.16483516, 0.90625   , 0.8       , ..., 0.        , 0.33333333,\n",
       "         0.5       ],\n",
       "        [0.48351648, 0.65625   , 0.6       , ..., 0.        , 1.        ,\n",
       "         0.        ]]),\n",
       " array([0, 1, 0, ..., 0, 0, 0], dtype=int64))"
      ]
     },
     "execution_count": 20,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "def hr_modeling(features,label):\n",
    "    f_v = features.values\n",
    "    l_v = label.values\n",
    "    f_names = features.columns.values\n",
    "    # 切分数据集，6:2:2\n",
    "    X_tt,X_validation,Y_tt,Y_validation = train_test_split(f_v,l_v,test_size=0.2)\n",
    "    X_train,X_test,Y_train,Y_test = train_test_split(X_tt,Y_tt,test_size=0.25)\n",
    "    print(len(X_train),len(X_validation),len(X_test))\n",
    "    return X_train,Y_train,X_test,Y_test,X_validation,Y_validation,f_names,f_v,l_v\n",
    "\n",
    "X_train,Y_train,X_test,Y_test,X_validation,Y_validation,f_names,f_v,l_v = hr_modeling(features,label)\n",
    "X_train,Y_train,X_test,Y_test,X_validation,Y_validation"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 监督学习-分类"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### KNN"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train\n",
      "ACC: 0.974997221913546\n",
      "REC: 0.9603024574669187\n",
      "F-Score 0.9475402191653066\n",
      "Validation\n",
      "ACC: 0.955\n",
      "REC: 0.9298245614035088\n",
      "F-Score 0.9107732980832783\n",
      "Test\n",
      "ACC: 0.9526666666666667\n",
      "REC: 0.9299719887955182\n",
      "F-Score 0.9034013605442176\n"
     ]
    }
   ],
   "source": [
    "# knn导包\n",
    "from sklearn.neighbors import NearestNeighbors,KNeighborsClassifier\n",
    "from sklearn.metrics import accuracy_score,recall_score,f1_score\n",
    "knn_clf = KNeighborsClassifier(n_neighbors=3)\n",
    "knn_clf.fit(X_train,Y_train)\n",
    "\n",
    "# 对训练集进行预测\n",
    "Y_pred = knn_clf.predict(X_train)\n",
    "print('Train')\n",
    "print('ACC:',accuracy_score(Y_train,Y_pred))\n",
    "print('REC:',recall_score(Y_train,Y_pred))\n",
    "print('F-Score',f1_score(Y_train,Y_pred))\n",
    "\n",
    "# 对验证集进行预测\n",
    "Y_pred = knn_clf.predict(X_validation)\n",
    "print('Validation')\n",
    "print('ACC:',accuracy_score(Y_validation,Y_pred))\n",
    "print('REC:',recall_score(Y_validation,Y_pred))\n",
    "print('F-Score',f1_score(Y_validation,Y_pred))\n",
    "\n",
    "# 对测试集进行预测\n",
    "Y_pred = knn_clf.predict(X_test)\n",
    "print('Test')\n",
    "print('ACC:',accuracy_score(Y_test,Y_pred))\n",
    "print('REC:',recall_score(Y_test,Y_pred))\n",
    "print('F-Score',f1_score(Y_test,Y_pred))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "f:\\condaenv\\py36\\lib\\site-packages\\sklearn\\externals\\joblib\\__init__.py:15: DeprecationWarning: sklearn.externals.joblib is deprecated in 0.21 and will be removed in 0.23. Please import this functionality directly from joblib, which can be installed with: pip install joblib. If this warning is raised when loading pickled models, you may need to re-serialize those models with scikit-learn 0.21+.\n",
      "  warnings.warn(msg, category=DeprecationWarning)\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "['knn_clf']"
      ]
     },
     "execution_count": 6,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# 将训练好的模型保存\n",
    "from sklearn.externals import joblib\n",
    "joblib.dump(knn_clf,'knn_clf')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Test2\n",
      "ACC: 0.9526666666666667\n",
      "REC: 0.9299719887955182\n",
      "F-Score 0.9034013605442176\n"
     ]
    }
   ],
   "source": [
    "# 加载训练好的模型，并进行推理\n",
    "knn_clf2 = joblib.load('knn_clf')\n",
    "Y_pred = knn_clf2.predict(X_test)\n",
    "print('Test2')\n",
    "print('ACC:',accuracy_score(Y_test,Y_pred))\n",
    "print('REC:',recall_score(Y_test,Y_pred))\n",
    "print('F-Score',f1_score(Y_test,Y_pred))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0\n",
      "KNN -ACC: 0.974997221913546\n",
      "KNN -REC: 0.9603024574669187\n",
      "KNN -F1: 0.9475402191653066\n",
      "1\n",
      "KNN -ACC: 0.955\n",
      "KNN -REC: 0.9298245614035088\n",
      "KNN -F1: 0.9107732980832783\n",
      "2\n",
      "KNN -ACC: 0.9526666666666667\n",
      "KNN -REC: 0.9299719887955182\n",
      "KNN -F1: 0.9034013605442176\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "('KNN',\n",
       " KNeighborsClassifier(algorithm='auto', leaf_size=30, metric='minkowski',\n",
       "                      metric_params=None, n_jobs=None, n_neighbors=3, p=2,\n",
       "                      weights='uniform'))"
      ]
     },
     "execution_count": 8,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# 构建模型模板\n",
    "models = []\n",
    "models.append(('KNN',knn_clf))\n",
    "def model_fit_print():\n",
    "    for clf_name,clf in models:\n",
    "        clf.fit(X_train,Y_train)\n",
    "        xy_test = [(X_train,Y_train),(X_validation,Y_validation),(X_test,Y_test)]\n",
    "        for i in range(len(xy_test)):\n",
    "            X_part = xy_test[i][0]\n",
    "            Y_part = xy_test[i][1]\n",
    "            Y_pred = clf.predict(X_part)\n",
    "            print(i)\n",
    "            print(clf_name,\"-ACC:\",accuracy_score(Y_part,Y_pred))\n",
    "            print(clf_name,\"-REC:\",recall_score(Y_part,Y_pred))\n",
    "            print(clf_name,\"-F1:\",f1_score(Y_part,Y_pred))\n",
    "        return clf_name,clf\n",
    "\n",
    "model_fit_print()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 朴素贝叶斯\n",
    "贝叶斯用到的值都是离散的，\n",
    "如果这些值都是二值（0,1）的话，就用伯努利贝叶斯，若值是连续的，伯努利贝叶斯也会将其二值化，\n",
    "如果值服从高斯分布，就用高斯贝叶斯"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0\n",
      "GaussianNB -ACC: 0.8033114790532281\n",
      "GaussianNB -REC: 0.695179584120983\n",
      "GaussianNB -F1: 0.6243633276740238\n",
      "1\n",
      "GaussianNB -ACC: 0.8043333333333333\n",
      "GaussianNB -REC: 0.7314439946018894\n",
      "GaussianNB -F1: 0.6487133453022143\n",
      "2\n",
      "GaussianNB -ACC: 0.801\n",
      "GaussianNB -REC: 0.6862745098039216\n",
      "GaussianNB -F1: 0.62143310082435\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "('GaussianNB', GaussianNB(priors=None, var_smoothing=1e-09))"
      ]
     },
     "execution_count": 9,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from sklearn.naive_bayes import GaussianNB,BernoulliNB\n",
    "models = []\n",
    "models.append(('GaussianNB',GaussianNB()))\n",
    "model_fit_print()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0\n",
      "BernoulliNB -ACC: 0.845316146238471\n",
      "BernoulliNB -REC: 0.46975425330812853\n",
      "BernoulliNB -F1: 0.5881656804733728\n",
      "1\n",
      "BernoulliNB -ACC: 0.8416666666666667\n",
      "BernoulliNB -REC: 0.49527665317139\n",
      "BernoulliNB -F1: 0.6071133167907362\n",
      "2\n",
      "BernoulliNB -ACC: 0.8303333333333334\n",
      "BernoulliNB -REC: 0.44397759103641454\n",
      "BernoulliNB -F1: 0.5546806649168853\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "('BernoulliNB',\n",
       " BernoulliNB(alpha=1.0, binarize=0.0, class_prior=None, fit_prior=True))"
      ]
     },
     "execution_count": 10,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "models = []\n",
    "models.append(('BernoulliNB',BernoulliNB()))\n",
    "model_fit_print()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 决策树"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0\n",
      "DecisionTreeGini -ACC: 1.0\n",
      "DecisionTreeGini -REC: 1.0\n",
      "DecisionTreeGini -F1: 1.0\n",
      "1\n",
      "DecisionTreeGini -ACC: 0.9766666666666667\n",
      "DecisionTreeGini -REC: 0.9689608636977058\n",
      "DecisionTreeGini -F1: 0.953519256308101\n",
      "2\n",
      "DecisionTreeGini -ACC: 0.9783333333333334\n",
      "DecisionTreeGini -REC: 0.9495798319327731\n",
      "DecisionTreeGini -F1: 0.9542575650950035\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "('DecisionTreeGini',\n",
       " DecisionTreeClassifier(class_weight=None, criterion='gini', max_depth=None,\n",
       "                        max_features=None, max_leaf_nodes=None,\n",
       "                        min_impurity_decrease=0.0, min_impurity_split=None,\n",
       "                        min_samples_leaf=1, min_samples_split=2,\n",
       "                        min_weight_fraction_leaf=0.0, presort=False,\n",
       "                        random_state=None, splitter='best'))"
      ]
     },
     "execution_count": 11,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from sklearn.tree import DecisionTreeClassifier,export_graphviz\n",
    "models = []\n",
    "models.append(('DecisionTreeGini',DecisionTreeClassifier()))\n",
    "model_fit_print()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "f:\\condaenv\\py36\\lib\\site-packages\\sklearn\\externals\\six.py:31: DeprecationWarning: The module is deprecated in version 0.21 and will be removed in version 0.23 since we've dropped support for Python 2.7. Please rely on the official version of six (https://pypi.org/project/six/).\n",
      "  \"(https://pypi.org/project/six/).\", DeprecationWarning)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0\n",
      "DecisionTreeGini -ACC: 1.0\n",
      "DecisionTreeGini -REC: 1.0\n",
      "DecisionTreeGini -F1: 1.0\n",
      "1\n",
      "DecisionTreeGini -ACC: 0.9746666666666667\n",
      "DecisionTreeGini -REC: 0.9676113360323887\n",
      "DecisionTreeGini -F1: 0.9496688741721855\n",
      "2\n",
      "DecisionTreeGini -ACC: 0.9763333333333334\n",
      "DecisionTreeGini -REC: 0.9481792717086834\n",
      "DecisionTreeGini -F1: 0.9501754385964912\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "True"
      ]
     },
     "execution_count": 12,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# 将决策树可视化\n",
    "import pydotplus\n",
    "from sklearn.externals.six import StringIO\n",
    "clf_name,clf = model_fit_print()\n",
    "dot_data = export_graphviz(clf,out_file=None,\n",
    "                           feature_names=f_names,\n",
    "                           class_names=['NL','L'],\n",
    "                           filled=True,\n",
    "                           rounded=True,\n",
    "                           special_characters=True)\n",
    "graph = pydotplus.graph_from_dot_data(dot_data)\n",
    "graph.write_pdf('dt_tree.pdf')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0\n",
      "DecisionTreeEntropy -ACC: 1.0\n",
      "DecisionTreeEntropy -REC: 1.0\n",
      "DecisionTreeEntropy -F1: 1.0\n",
      "1\n",
      "DecisionTreeEntropy -ACC: 0.983\n",
      "DecisionTreeEntropy -REC: 0.9676113360323887\n",
      "DecisionTreeEntropy -F1: 0.9656565656565657\n",
      "2\n",
      "DecisionTreeEntropy -ACC: 0.977\n",
      "DecisionTreeEntropy -REC: 0.9481792717086834\n",
      "DecisionTreeEntropy -F1: 0.9515108924806747\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "('DecisionTreeEntropy',\n",
       " DecisionTreeClassifier(class_weight=None, criterion='entropy', max_depth=None,\n",
       "                        max_features=None, max_leaf_nodes=None,\n",
       "                        min_impurity_decrease=0.0, min_impurity_split=None,\n",
       "                        min_samples_leaf=1, min_samples_split=2,\n",
       "                        min_weight_fraction_leaf=0.0, presort=False,\n",
       "                        random_state=None, splitter='best'))"
      ]
     },
     "execution_count": 13,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# 使用信息增益进行决策树\n",
    "models = []\n",
    "models.append(('DecisionTreeEntropy',DecisionTreeClassifier(criterion='entropy')))\n",
    "model_fit_print()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0\n",
      "DecisionTreeGini2 -ACC: 0.821313479275475\n",
      "DecisionTreeGini2 -REC: 0.7126654064272212\n",
      "DecisionTreeGini2 -F1: 0.6522491349480969\n",
      "1\n",
      "DecisionTreeGini2 -ACC: 0.8206666666666667\n",
      "DecisionTreeGini2 -REC: 0.7233468286099866\n",
      "DecisionTreeGini2 -F1: 0.6658385093167702\n",
      "2\n",
      "DecisionTreeGini2 -ACC: 0.818\n",
      "DecisionTreeGini2 -REC: 0.6820728291316527\n",
      "DecisionTreeGini2 -F1: 0.6407894736842105\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "('DecisionTreeGini2',\n",
       " DecisionTreeClassifier(class_weight=None, criterion='gini', max_depth=None,\n",
       "                        max_features=None, max_leaf_nodes=None,\n",
       "                        min_impurity_decrease=0.1, min_impurity_split=None,\n",
       "                        min_samples_leaf=1, min_samples_split=2,\n",
       "                        min_weight_fraction_leaf=0.0, presort=False,\n",
       "                        random_state=None, splitter='best'))"
      ]
     },
     "execution_count": 14,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# 使用最小不纯度为0.1切分进行决策树\n",
    "models = []\n",
    "models.append(('DecisionTreeGini2',DecisionTreeClassifier(min_impurity_decrease=0.1)))\n",
    "model_fit_print()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 支持向量机"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0\n",
      "SVM -ACC: 0.9605511723524837\n",
      "SVM -REC: 0.9092627599243857\n",
      "SVM -F1: 0.9155365215322389\n",
      "1\n",
      "SVM -ACC: 0.9623333333333334\n",
      "SVM -REC: 0.9257759784075573\n",
      "SVM -F1: 0.9239057239057238\n",
      "2\n",
      "SVM -ACC: 0.956\n",
      "SVM -REC: 0.9061624649859944\n",
      "SVM -F1: 0.9074333800841514\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "('SVM', SVC(C=1000, cache_size=200, class_weight=None, coef0=0.0,\n",
       "     decision_function_shape='ovr', degree=3, gamma='auto', kernel='rbf',\n",
       "     max_iter=-1, probability=False, random_state=None, shrinking=True,\n",
       "     tol=0.001, verbose=False))"
      ]
     },
     "execution_count": 15,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from sklearn.svm import SVC\n",
    "models = []\n",
    "models.append(('SVM',SVC(gamma='auto',C=1000)))\n",
    "model_fit_print()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 集成方法-随机森林"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0\n",
      "RandomForest -ACC: 0.9972219135459496\n",
      "RandomForest -REC: 0.9891304347826086\n",
      "RandomForest -F1: 0.9940631678936119\n",
      "1\n",
      "RandomForest -ACC: 0.99\n",
      "RandomForest -REC: 0.9649122807017544\n",
      "RandomForest -F1: 0.9794520547945206\n",
      "2\n",
      "RandomForest -ACC: 0.987\n",
      "RandomForest -REC: 0.9467787114845938\n",
      "RandomForest -F1: 0.9719626168224299\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "f:\\condaenv\\py36\\lib\\site-packages\\sklearn\\ensemble\\forest.py:245: FutureWarning: The default value of n_estimators will change from 10 in version 0.20 to 100 in 0.22.\n",
      "  \"10 in version 0.20 to 100 in 0.22.\", FutureWarning)\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "('RandomForest',\n",
       " RandomForestClassifier(bootstrap=True, class_weight=None, criterion='gini',\n",
       "                        max_depth=None, max_features='auto', max_leaf_nodes=None,\n",
       "                        min_impurity_decrease=0.0, min_impurity_split=None,\n",
       "                        min_samples_leaf=1, min_samples_split=2,\n",
       "                        min_weight_fraction_leaf=0.0, n_estimators=10,\n",
       "                        n_jobs=None, oob_score=False, random_state=None,\n",
       "                        verbose=0, warm_start=False))"
      ]
     },
     "execution_count": 16,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from sklearn.ensemble import RandomForestClassifier\n",
    "models = []\n",
    "models.append(('RandomForest',RandomForestClassifier()))\n",
    "model_fit_print()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 集成方法-提升法AdaBoost"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0\n",
      "AdaBoost -ACC: 0.9592176908545393\n",
      "AdaBoost -REC: 0.9083175803402647\n",
      "AdaBoost -F1: 0.9128473046782237\n",
      "1\n",
      "AdaBoost -ACC: 0.958\n",
      "AdaBoost -REC: 0.8974358974358975\n",
      "AdaBoost -F1: 0.9134615384615385\n",
      "2\n",
      "AdaBoost -ACC: 0.9546666666666667\n",
      "AdaBoost -REC: 0.8851540616246498\n",
      "AdaBoost -F1: 0.9028571428571429\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "('AdaBoost',\n",
       " AdaBoostClassifier(algorithm='SAMME.R', base_estimator=None, learning_rate=1.0,\n",
       "                    n_estimators=50, random_state=None))"
      ]
     },
     "execution_count": 17,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from sklearn.ensemble import AdaBoostClassifier\n",
    "models = []\n",
    "models.append(('AdaBoost',AdaBoostClassifier()))\n",
    "model_fit_print()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 监督学习-回归"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 线性回归"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "X        number_project  average_monthly_hours\n",
      "0                 0.0               0.285047\n",
      "1                 0.6               0.775701\n",
      "2                 1.0               0.822430\n",
      "3                 0.6               0.593458\n",
      "4                 0.0               0.294393\n",
      "...               ...                    ...\n",
      "14994             0.0               0.257009\n",
      "14995             0.0               0.299065\n",
      "14996             0.0               0.219626\n",
      "14997             0.8               0.859813\n",
      "14998             0.0               0.289720\n",
      "\n",
      "[14999 rows x 2 columns]\n",
      "Y 0        0.265625\n",
      "1        0.781250\n",
      "2        0.812500\n",
      "3        0.796875\n",
      "4        0.250000\n",
      "           ...   \n",
      "14994    0.328125\n",
      "14995    0.187500\n",
      "14996    0.265625\n",
      "14997    0.937500\n",
      "14998    0.250000\n",
      "Name: last_evaluation, Length: 14999, dtype: float64\n",
      "LinearRegression\n",
      "Coef: [0.27268022 0.26917309]\n",
      "MSE: 0.05953800649100494\n",
      "Ridge\n",
      "Coef: [0.27265976 0.26914916]\n",
      "MSE: 0.05953800657114579\n",
      "Lasso\n",
      "Coef: [0.25039551 0.24227119]\n",
      "MSE: 0.0596363767370062\n"
     ]
    }
   ],
   "source": [
    "# 引入线性回归，岭回归，lasso回归\n",
    "from sklearn.linear_model import LinearRegression,Ridge,Lasso\n",
    "from sklearn.metrics import mean_squared_error\n",
    "def regr_test(features,label):\n",
    "    print('X',features)\n",
    "    print('Y',label)\n",
    "    regr = LinearRegression()\n",
    "    regr.fit(features.values,label.values)\n",
    "    Y_pred = regr.predict(features.values)\n",
    "    print('LinearRegression')\n",
    "    print('Coef:',regr.coef_)\n",
    "    print('MSE:',mean_squared_error(Y_pred,label.values))\n",
    "    regr = Ridge(alpha=0.1)\n",
    "    regr.fit(features.values,label.values)\n",
    "    Y_pred = regr.predict(features.values)\n",
    "    print('Ridge')\n",
    "    print('Coef:',regr.coef_)\n",
    "    print('MSE:',mean_squared_error(Y_pred,label.values))\n",
    "    regr = Lasso(alpha=0.002)\n",
    "    regr.fit(features.values,label.values)\n",
    "    Y_pred = regr.predict(features.values)\n",
    "    print('Lasso')\n",
    "    print('Coef:',regr.coef_)\n",
    "    print('MSE:',mean_squared_error(Y_pred,label.values))\n",
    "\n",
    "regr_test(features[['number_project','average_monthly_hours']],features['last_evaluation'])"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 逻辑回归"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0\n",
      "LogisticRegression -ACC: 0.7977553061451272\n",
      "LogisticRegression -REC: 0.3686200378071834\n",
      "LogisticRegression -F1: 0.46153846153846156\n",
      "1\n",
      "LogisticRegression -ACC: 0.807\n",
      "LogisticRegression -REC: 0.41700404858299595\n",
      "LogisticRegression -F1: 0.5162907268170426\n",
      "2\n",
      "LogisticRegression -ACC: 0.7866666666666666\n",
      "LogisticRegression -REC: 0.3403361344537815\n",
      "LogisticRegression -F1: 0.4316163410301954\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "f:\\condaenv\\py36\\lib\\site-packages\\sklearn\\linear_model\\logistic.py:432: FutureWarning: Default solver will be changed to 'lbfgs' in 0.22. Specify a solver to silence this warning.\n",
      "  FutureWarning)\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "('LogisticRegression',\n",
       " LogisticRegression(C=1000, class_weight=None, dual=False, fit_intercept=True,\n",
       "                    intercept_scaling=1, l1_ratio=None, max_iter=100,\n",
       "                    multi_class='warn', n_jobs=None, penalty='l2',\n",
       "                    random_state=None, solver='warn', tol=1e-10, verbose=0,\n",
       "                    warm_start=False))"
      ]
     },
     "execution_count": 19,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Logistic-regression classifier. Large C (=1000) makes the L2 penalty\n",
    "# very weak (C is the inverse regularization strength); the tight\n",
    "# tol=1e-10 forces the solver to converge closely.\n",
    "from sklearn.linear_model import LogisticRegression\n",
    "# Rebuild the shared model list consumed by model_fit_print().\n",
    "models = []\n",
    "models.append(('LogisticRegression',LogisticRegression(C=1000,tol=1e-10)))\n",
    "# model_fit_print() is defined in an earlier cell; judging by the output\n",
    "# it fits every model in `models` and prints ACC/REC/F1 for the three\n",
    "# data splits 0-2 — confirm against its definition above.\n",
    "model_fit_print()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 人工神经网络"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 1/500\n",
      "8999/8999 [==============================] - 0s 22us/step - loss: 0.6158\n",
      "Epoch 2/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.5722\n",
      "Epoch 3/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.5193\n",
      "Epoch 4/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.4585\n",
      "Epoch 5/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.3929\n",
      "Epoch 6/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.3290\n",
      "Epoch 7/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.2733\n",
      "Epoch 8/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.2314\n",
      "Epoch 9/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.2052\n",
      "Epoch 10/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1908\n",
      "Epoch 11/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1836\n",
      "Epoch 12/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1806\n",
      "Epoch 13/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1795\n",
      "Epoch 14/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1791\n",
      "Epoch 15/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1790\n",
      "Epoch 16/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1790\n",
      "Epoch 17/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1789\n",
      "Epoch 18/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1788\n",
      "Epoch 19/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1786\n",
      "Epoch 20/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1785\n",
      "Epoch 21/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1783\n",
      "Epoch 22/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1781\n",
      "Epoch 23/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1780\n",
      "Epoch 24/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1778\n",
      "Epoch 25/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1777\n",
      "Epoch 26/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1775\n",
      "Epoch 27/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1774\n",
      "Epoch 28/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1772\n",
      "Epoch 29/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1771\n",
      "Epoch 30/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1769\n",
      "Epoch 31/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1768\n",
      "Epoch 32/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1766\n",
      "Epoch 33/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1765\n",
      "Epoch 34/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1763\n",
      "Epoch 35/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1762\n",
      "Epoch 36/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1760\n",
      "Epoch 37/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1759\n",
      "Epoch 38/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1757\n",
      "Epoch 39/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1755\n",
      "Epoch 40/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1754\n",
      "Epoch 41/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1752\n",
      "Epoch 42/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1750\n",
      "Epoch 43/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1749\n",
      "Epoch 44/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1747\n",
      "Epoch 45/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1745\n",
      "Epoch 46/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1744\n",
      "Epoch 47/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1742\n",
      "Epoch 48/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1740\n",
      "Epoch 49/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1738\n",
      "Epoch 50/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1737\n",
      "Epoch 51/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1735\n",
      "Epoch 52/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1733\n",
      "Epoch 53/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1731\n",
      "Epoch 54/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1730\n",
      "Epoch 55/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1728\n",
      "Epoch 56/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1726\n",
      "Epoch 57/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1724\n",
      "Epoch 58/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1722\n",
      "Epoch 59/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1721\n",
      "Epoch 60/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1719\n",
      "Epoch 61/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1717\n",
      "Epoch 62/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1715\n",
      "Epoch 63/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1713\n",
      "Epoch 64/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1711\n",
      "Epoch 65/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1709\n",
      "Epoch 66/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1708\n",
      "Epoch 67/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1706\n",
      "Epoch 68/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1704\n",
      "Epoch 69/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1702\n",
      "Epoch 70/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1700\n",
      "Epoch 71/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1698\n",
      "Epoch 72/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1696\n",
      "Epoch 73/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1694\n",
      "Epoch 74/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1692\n",
      "Epoch 75/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1690\n",
      "Epoch 76/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1688\n",
      "Epoch 77/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1686\n",
      "Epoch 78/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1684\n",
      "Epoch 79/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1682\n",
      "Epoch 80/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1680\n",
      "Epoch 81/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1678\n",
      "Epoch 82/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1676\n",
      "Epoch 83/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1674\n",
      "Epoch 84/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1672\n",
      "Epoch 85/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1670\n",
      "Epoch 86/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1668\n",
      "Epoch 87/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1666\n",
      "Epoch 88/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1664\n",
      "Epoch 89/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1662\n",
      "Epoch 90/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1660\n",
      "Epoch 91/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1658\n",
      "Epoch 92/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1656\n",
      "Epoch 93/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1654\n",
      "Epoch 94/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1652\n",
      "Epoch 95/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1650\n",
      "Epoch 96/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1648\n",
      "Epoch 97/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1646\n",
      "Epoch 98/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1643\n",
      "Epoch 99/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1641\n",
      "Epoch 100/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1639\n",
      "Epoch 101/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1637\n",
      "Epoch 102/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1635\n",
      "Epoch 103/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1633\n",
      "Epoch 104/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1631\n",
      "Epoch 105/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1629\n",
      "Epoch 106/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1627\n",
      "Epoch 107/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1625\n",
      "Epoch 108/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1622\n",
      "Epoch 109/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1620\n",
      "Epoch 110/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1618\n",
      "Epoch 111/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1616\n",
      "Epoch 112/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1614\n",
      "Epoch 113/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1612\n",
      "Epoch 114/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1610\n",
      "Epoch 115/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1608\n",
      "Epoch 116/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1606\n",
      "Epoch 117/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1604\n",
      "Epoch 118/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1602\n",
      "Epoch 119/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1600\n",
      "Epoch 120/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1597\n",
      "Epoch 121/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1596\n",
      "Epoch 122/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1593\n",
      "Epoch 123/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1591\n",
      "Epoch 124/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1589\n",
      "Epoch 125/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1587\n",
      "Epoch 126/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1585\n",
      "Epoch 127/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1583\n",
      "Epoch 128/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1581\n",
      "Epoch 129/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1579\n",
      "Epoch 130/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1577\n",
      "Epoch 131/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1575\n",
      "Epoch 132/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1573\n",
      "Epoch 133/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1571\n",
      "Epoch 134/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1569\n",
      "Epoch 135/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1567\n",
      "Epoch 136/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1565\n",
      "Epoch 137/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1563\n",
      "Epoch 138/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1562\n",
      "Epoch 139/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1560\n",
      "Epoch 140/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1558\n",
      "Epoch 141/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1556\n",
      "Epoch 142/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1554\n",
      "Epoch 143/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1552\n",
      "Epoch 144/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1550\n",
      "Epoch 145/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1548\n",
      "Epoch 146/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1547\n",
      "Epoch 147/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1545\n",
      "Epoch 148/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1543\n",
      "Epoch 149/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1541\n",
      "Epoch 150/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1540\n",
      "Epoch 151/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1538\n",
      "Epoch 152/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1536\n",
      "Epoch 153/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1534\n",
      "Epoch 154/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1533\n",
      "Epoch 155/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1531\n",
      "Epoch 156/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1530\n",
      "Epoch 157/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1528\n",
      "Epoch 158/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1526\n",
      "Epoch 159/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1525\n",
      "Epoch 160/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1523\n",
      "Epoch 161/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1522\n",
      "Epoch 162/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1520\n",
      "Epoch 163/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1518\n",
      "Epoch 164/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1517\n",
      "Epoch 165/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1515\n",
      "Epoch 166/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1514\n",
      "Epoch 167/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1513\n",
      "Epoch 168/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1511\n",
      "Epoch 169/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1510\n",
      "Epoch 170/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1508\n",
      "Epoch 171/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1507\n",
      "Epoch 172/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1506\n",
      "Epoch 173/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1504\n",
      "Epoch 174/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1503\n",
      "Epoch 175/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1502\n",
      "Epoch 176/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1500\n",
      "Epoch 177/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1499\n",
      "Epoch 178/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1498\n",
      "Epoch 179/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1497\n",
      "Epoch 180/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1495\n",
      "Epoch 181/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1494\n",
      "Epoch 182/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1493\n",
      "Epoch 183/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1492\n",
      "Epoch 184/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1490\n",
      "Epoch 185/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1489\n",
      "Epoch 186/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1488\n",
      "Epoch 187/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1487\n",
      "Epoch 188/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1486\n",
      "Epoch 189/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1485\n",
      "Epoch 190/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1484\n",
      "Epoch 191/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1483\n",
      "Epoch 192/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1482\n",
      "Epoch 193/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1481\n",
      "Epoch 194/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1480\n",
      "Epoch 195/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1478\n",
      "Epoch 196/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1477\n",
      "Epoch 197/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1476\n",
      "Epoch 198/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1475\n",
      "Epoch 199/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1474\n",
      "Epoch 200/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1473\n",
      "Epoch 201/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1473\n",
      "Epoch 202/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1472\n",
      "Epoch 203/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1471\n",
      "Epoch 204/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1470\n",
      "Epoch 205/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1469\n",
      "Epoch 206/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1468\n",
      "Epoch 207/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1467\n",
      "Epoch 208/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1466\n",
      "Epoch 209/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1465\n",
      "Epoch 210/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1464\n",
      "Epoch 211/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1464\n",
      "Epoch 212/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1463\n",
      "Epoch 213/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1462\n",
      "Epoch 214/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1461\n",
      "Epoch 215/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1460\n",
      "Epoch 216/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1459\n",
      "Epoch 217/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1459\n",
      "Epoch 218/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1458\n",
      "Epoch 219/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1457\n",
      "Epoch 220/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1457\n",
      "Epoch 221/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1456\n",
      "Epoch 222/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1455\n",
      "Epoch 223/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1454\n",
      "Epoch 224/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1453\n",
      "Epoch 225/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1453\n",
      "Epoch 226/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1452\n",
      "Epoch 227/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1451\n",
      "Epoch 228/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1450\n",
      "Epoch 229/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1450\n",
      "Epoch 230/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1449\n",
      "Epoch 231/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1448\n",
      "Epoch 232/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1447\n",
      "Epoch 233/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1447\n",
      "Epoch 234/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1446\n",
      "Epoch 235/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1445\n",
      "Epoch 236/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1445\n",
      "Epoch 237/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1444\n",
      "Epoch 238/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1443\n",
      "Epoch 239/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1443\n",
      "Epoch 240/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1442\n",
      "Epoch 241/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1441\n",
      "Epoch 242/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1441\n",
      "Epoch 243/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1440\n",
      "Epoch 244/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1439\n",
      "Epoch 245/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1439\n",
      "Epoch 246/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1438\n",
      "Epoch 247/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1438\n",
      "Epoch 248/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1437\n",
      "Epoch 249/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1436\n",
      "Epoch 250/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1436\n",
      "Epoch 251/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1435\n",
      "Epoch 252/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1435\n",
      "Epoch 253/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1434\n",
      "Epoch 254/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1434\n",
      "Epoch 255/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1433\n",
      "Epoch 256/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1433\n",
      "Epoch 257/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1432\n",
      "Epoch 258/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1431\n",
      "Epoch 259/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1431\n",
      "Epoch 260/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1430\n",
      "Epoch 261/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1430\n",
      "Epoch 262/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1429\n",
      "Epoch 263/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1429\n",
      "Epoch 264/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1428\n",
      "Epoch 265/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1428\n",
      "Epoch 266/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1427\n",
      "Epoch 267/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1427\n",
      "Epoch 268/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1426\n",
      "Epoch 269/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1426\n",
      "Epoch 270/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1425\n",
      "Epoch 271/500\n",
      "8999/8999 [==============================] - 0s 4us/step - loss: 0.1425\n",
      "Epoch 272/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1424\n",
      "Epoch 273/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1424\n",
      "Epoch 274/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1423\n",
      "Epoch 275/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1423\n",
      "Epoch 276/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1422\n",
      "Epoch 277/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1422\n",
      "Epoch 278/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1421\n",
      "Epoch 279/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1421\n",
      "Epoch 280/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1420\n",
      "Epoch 281/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1420\n",
      "Epoch 282/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1419\n",
      "Epoch 283/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1419\n",
      "Epoch 284/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1418\n",
      "Epoch 285/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1418\n",
      "Epoch 286/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1418\n",
      "Epoch 287/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1417\n",
      "Epoch 288/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1417\n",
      "Epoch 289/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1416\n",
      "Epoch 290/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1416\n",
      "Epoch 291/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1415\n",
      "Epoch 292/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1415\n",
      "Epoch 293/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1415\n",
      "Epoch 294/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1414\n",
      "Epoch 295/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1414\n",
      "Epoch 296/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1413\n",
      "Epoch 297/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1413\n",
      "Epoch 298/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1413\n",
      "Epoch 299/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1412\n",
      "Epoch 300/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1412\n",
      "Epoch 301/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1411\n",
      "Epoch 302/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1411\n",
      "Epoch 303/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1411\n",
      "Epoch 304/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1410\n",
      "Epoch 305/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1410\n",
      "Epoch 306/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1410\n",
      "Epoch 307/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1409\n",
      "Epoch 308/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1409\n",
      "Epoch 309/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1408\n",
      "Epoch 310/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1408\n",
      "Epoch 311/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1408\n",
      "Epoch 312/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1407\n",
      "Epoch 313/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1407\n",
      "Epoch 314/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1407\n",
      "Epoch 315/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1406\n",
      "Epoch 316/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1406\n",
      "Epoch 317/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1405\n",
      "Epoch 318/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1405\n",
      "Epoch 319/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1405\n",
      "Epoch 320/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1404\n",
      "Epoch 321/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1404\n",
      "Epoch 322/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1404\n",
      "Epoch 323/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1404\n",
      "Epoch 324/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1403\n",
      "Epoch 325/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1403\n",
      "Epoch 326/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1402\n",
      "Epoch 327/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1402\n",
      "Epoch 328/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1402\n",
      "Epoch 329/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1401\n",
      "Epoch 330/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1401\n",
      "Epoch 331/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1401\n",
      "Epoch 332/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1400\n",
      "Epoch 333/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1400\n",
      "Epoch 334/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1400\n",
      "Epoch 335/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1399\n",
      "Epoch 336/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1399\n",
      "Epoch 337/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1399\n",
      "Epoch 338/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1398\n",
      "Epoch 339/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1398\n",
      "Epoch 340/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1398\n",
      "Epoch 341/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1397\n",
      "Epoch 342/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1397\n",
      "Epoch 343/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1397\n",
      "Epoch 344/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1396\n",
      "Epoch 345/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1396\n",
      "Epoch 346/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1396\n",
      "Epoch 347/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1395\n",
      "Epoch 348/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1395\n",
      "Epoch 349/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1395\n",
      "Epoch 350/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1394\n",
      "Epoch 351/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1394\n",
      "Epoch 352/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1394\n",
      "Epoch 353/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1394\n",
      "Epoch 354/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1393\n",
      "Epoch 355/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1393\n",
      "Epoch 356/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1393\n",
      "Epoch 357/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1392\n",
      "Epoch 358/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1392\n",
      "Epoch 359/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1392\n",
      "Epoch 360/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1391\n",
      "Epoch 361/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1391\n",
      "Epoch 362/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1391\n",
      "Epoch 363/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1391\n",
      "Epoch 364/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1390\n",
      "Epoch 365/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1390\n",
      "Epoch 366/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1390\n",
      "Epoch 367/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1389\n",
      "Epoch 368/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1389\n",
      "Epoch 369/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1389\n",
      "Epoch 370/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1388\n",
      "Epoch 371/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1388\n",
      "Epoch 372/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1388\n",
      "Epoch 373/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1387\n",
      "Epoch 374/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1387\n",
      "Epoch 375/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1387\n",
      "Epoch 376/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1387\n",
      "Epoch 377/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1386\n",
      "Epoch 378/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1386\n",
      "Epoch 379/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1386\n",
      "Epoch 380/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1385\n",
      "Epoch 381/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1385\n",
      "Epoch 382/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1385\n",
      "Epoch 383/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1384\n",
      "Epoch 384/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1384\n",
      "Epoch 385/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1384\n",
      "Epoch 386/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1383\n",
      "Epoch 387/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1383\n",
      "Epoch 388/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1383\n",
      "Epoch 389/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1383\n",
      "Epoch 390/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1382\n",
      "Epoch 391/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1382\n",
      "Epoch 392/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1382\n",
      "Epoch 393/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1381\n",
      "Epoch 394/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1381\n",
      "Epoch 395/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1381\n",
      "Epoch 396/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1380\n",
      "Epoch 397/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1380\n",
      "Epoch 398/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1380\n",
      "Epoch 399/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1379\n",
      "Epoch 400/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1379\n",
      "Epoch 401/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1379\n",
      "Epoch 402/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1378\n",
      "Epoch 403/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1378\n",
      "Epoch 404/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1378\n",
      "Epoch 405/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1377\n",
      "Epoch 406/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1377\n",
      "Epoch 407/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1377\n",
      "Epoch 408/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1377\n",
      "Epoch 409/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1376\n",
      "Epoch 410/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1376\n",
      "Epoch 411/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1376\n",
      "Epoch 412/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1375\n",
      "Epoch 413/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1375\n",
      "Epoch 414/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1374\n",
      "Epoch 415/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1374\n",
      "Epoch 416/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1374\n",
      "Epoch 417/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1373\n",
      "Epoch 418/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1373\n",
      "Epoch 419/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1373\n",
      "Epoch 420/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1372\n",
      "Epoch 421/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1372\n",
      "Epoch 422/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1372\n",
      "Epoch 423/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1371\n",
      "Epoch 424/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1371\n",
      "Epoch 425/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1370\n",
      "Epoch 426/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1370\n",
      "Epoch 427/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1370\n",
      "Epoch 428/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1369\n",
      "Epoch 429/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1369\n",
      "Epoch 430/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1369\n",
      "Epoch 431/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1368\n",
      "Epoch 432/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1368\n",
      "Epoch 433/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1367\n",
      "Epoch 434/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1367\n",
      "Epoch 435/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1367\n",
      "Epoch 436/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1366\n",
      "Epoch 437/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1366\n",
      "Epoch 438/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1366\n",
      "Epoch 439/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1365\n",
      "Epoch 440/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1365\n",
      "Epoch 441/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1364\n",
      "Epoch 442/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1364\n",
      "Epoch 443/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1364\n",
      "Epoch 444/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1363\n",
      "Epoch 445/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1363\n",
      "Epoch 446/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1362\n",
      "Epoch 447/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1362\n",
      "Epoch 448/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1362\n",
      "Epoch 449/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1361\n",
      "Epoch 450/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1361\n",
      "Epoch 451/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1360\n",
      "Epoch 452/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1360\n",
      "Epoch 453/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1359\n",
      "Epoch 454/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1359\n",
      "Epoch 455/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1358\n",
      "Epoch 456/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1358\n",
      "Epoch 457/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1358\n",
      "Epoch 458/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1357\n",
      "Epoch 459/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1357\n",
      "Epoch 460/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1356\n",
      "Epoch 461/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1356\n",
      "Epoch 462/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1355\n",
      "Epoch 463/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1355\n",
      "Epoch 464/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1354\n",
      "Epoch 465/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1354\n",
      "Epoch 466/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1353\n",
      "Epoch 467/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1353\n",
      "Epoch 468/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1352\n",
      "Epoch 469/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1352\n",
      "Epoch 470/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1351\n",
      "Epoch 471/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1351\n",
      "Epoch 472/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1350\n",
      "Epoch 473/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1350\n",
      "Epoch 474/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1349\n",
      "Epoch 475/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1349\n",
      "Epoch 476/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1348\n",
      "Epoch 477/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1348\n",
      "Epoch 478/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1347\n",
      "Epoch 479/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1346\n",
      "Epoch 480/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1346\n",
      "Epoch 481/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1345\n",
      "Epoch 482/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1345\n",
      "Epoch 483/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1344\n",
      "Epoch 484/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1344\n",
      "Epoch 485/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1343\n",
      "Epoch 486/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1342\n",
      "Epoch 487/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1342\n",
      "Epoch 488/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1341\n",
      "Epoch 489/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1340\n",
      "Epoch 490/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1340\n",
      "Epoch 491/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1339\n",
      "Epoch 492/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1338\n",
      "Epoch 493/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1338\n",
      "Epoch 494/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1337\n",
      "Epoch 495/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1337\n",
      "Epoch 496/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1336\n",
      "Epoch 497/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1335\n",
      "Epoch 498/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1334\n",
      "Epoch 499/500\n",
      "8999/8999 [==============================] - 0s 3us/step - loss: 0.1334\n",
      "Epoch 500/500\n",
      "8999/8999 [==============================] - 0s 2us/step - loss: 0.1333\n",
      "0\n",
      "NN-ACC: 0.8362040226691855\n",
      "NN-REC: 0.5280636108512629\n",
      "NN-F1: 0.605037513397642\n",
      "1\n",
      "NN-ACC: 0.848\n",
      "NN-REC: 0.5108538350217077\n",
      "NN-F1: 0.6075731497418244\n",
      "2\n",
      "NN-ACC: 0.8353333333333334\n",
      "NN-REC: 0.5471698113207547\n",
      "NN-F1: 0.6217457886676875\n"
     ]
    }
   ],
   "source": [
    "from keras.models import Sequential\n",
    "from keras.layers.core import Dense,Activation\n",
    "from keras.optimizers import SGD\n",
    "mdl = Sequential()\n",
    "mdl.add(Dense(50,input_dim=len(f_v[0])))\n",
    "mdl.add(Activation('sigmoid'))\n",
    "mdl.add(Dense(2))\n",
    "mdl.add(Activation('softmax'))\n",
    "sgd = SGD(lr=0.1)\n",
    "mdl.compile(loss='mse',optimizer='adam')\n",
    "mdl.fit(X_train,np.array([[0,1] if i==1 else [1,0] for i in Y_train]),epochs=500,batch_size=2048)\n",
    "\n",
    "xy_test = [(X_train,Y_train),(X_validation,Y_validation),(X_test,Y_test)]\n",
    "for i in range(len(xy_test)):\n",
    "    X_part = xy_test[i][0]\n",
    "    Y_part = xy_test[i][1]\n",
    "    Y_pred = mdl.predict_classes(X_part)\n",
    "    print(i)\n",
    "    print(\"NN-ACC:\",accuracy_score(Y_part,Y_pred))\n",
    "    print(\"NN-REC:\",recall_score(Y_part,Y_pred))\n",
    "    print(\"NN-F1:\",f1_score(Y_part,Y_pred))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 回归树和提升树--梯度提升决策树GBDT"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0\n",
      "GBDT -ACC: 0.9962218024224914\n",
      "GBDT -REC: 0.9883068288119738\n",
      "GBDT -F1: 0.9920187793427231\n",
      "1\n",
      "GBDT -ACC: 0.9823333333333333\n",
      "GBDT -REC: 0.9507959479015919\n",
      "GBDT -F1: 0.9612289685442575\n",
      "2\n",
      "GBDT -ACC: 0.981\n",
      "GBDT -REC: 0.9460916442048517\n",
      "GBDT -F1: 0.9609856262833677\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "('GBDT', GradientBoostingClassifier(criterion='friedman_mse', init=None,\n",
       "                            learning_rate=0.1, loss='deviance', max_depth=6,\n",
       "                            max_features=None, max_leaf_nodes=None,\n",
       "                            min_impurity_decrease=0.0, min_impurity_split=None,\n",
       "                            min_samples_leaf=1, min_samples_split=2,\n",
       "                            min_weight_fraction_leaf=0.0, n_estimators=100,\n",
       "                            n_iter_no_change=None, presort='auto',\n",
       "                            random_state=None, subsample=1.0, tol=0.0001,\n",
       "                            validation_fraction=0.1, verbose=0,\n",
       "                            warm_start=False))"
      ]
     },
     "execution_count": 27,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Gradient-boosted decision trees (GBDT) on the same splits as the other models.\n",
    "from sklearn.ensemble import GradientBoostingClassifier\n",
    "models = []\n",
    "# (name, estimator) tuple; model_fit_print() is defined earlier in the notebook and\n",
    "# presumably iterates the global `models` list -- confirm against its definition.\n",
    "models.append(('GBDT',GradientBoostingClassifier(max_depth=6,n_estimators=100)))\n",
    "model_fit_print()"
   ]
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.8"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 4
}
