{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Read Data Sample"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:59:22.980228Z",
     "start_time": "2017-07-23T23:59:22.561678Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "import pandas as pd\n",
    "import numpy as np\n",
    "import os\n",
    "from collections import namedtuple\n",
    "pd.set_option(\"display.max_rows\",100)\n",
    "%matplotlib inline"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-19T18:41:49.162575Z",
     "start_time": "2017-07-19T18:41:49.151675Z"
    },
    "collapsed": true
   },
   "source": [
    "Disabled cleanup step — kept as a markdown cell so it does not run on every execution. Convert back to a code cell to reset the saved scores:\n",
    "\n",
    "```bash\n",
    "%%bash\n",
    "rm dataset/scores/tf_dense_only_nsl_kdd_scores_all.pkl\n",
    "```"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:59:23.078325Z",
     "start_time": "2017-07-23T23:59:22.981831Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "class dataset:\n",
     "    \"\"\"Namespace holding the pre-pickled NSL-KDD train/test DataFrames.\n",
     "\n",
     "    The pickles are loaded eagerly at class-definition time, so this cell\n",
     "    fails fast if the dataset files are missing.\n",
     "    \"\"\"\n",
     "\n",
     "    # Two-class (Normal vs Attack) one-hot labelled variants.\n",
     "    # The double-underscore name presumably refers to an alternate/harder\n",
     "    # test split -- TODO confirm against the dataset provenance.\n",
     "    kdd_train_2labels = pd.read_pickle(\"dataset/kdd_train_2labels.pkl\")\n",
     "    kdd_test_2labels = pd.read_pickle(\"dataset/kdd_test_2labels.pkl\")\n",
     "    kdd_test__2labels = pd.read_pickle(\"dataset/kdd_test__2labels.pkl\")\n",
     "\n",
     "    # Five-class variants (not referenced by the cells shown in this notebook).\n",
     "    kdd_train_5labels = pd.read_pickle(\"dataset/kdd_train_5labels.pkl\")\n",
     "    kdd_test_5labels = pd.read_pickle(\"dataset/kdd_test_5labels.pkl\")\n",
     "    "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:59:23.084970Z",
     "start_time": "2017-07-23T23:59:23.079981Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(125973, 124)"
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# Sanity check -- expect (125973, 124): 122 feature columns + 2 label columns\n",
     "dataset.kdd_train_2labels.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:59:23.090607Z",
     "start_time": "2017-07-23T23:59:23.086432Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(22544, 124)"
      ]
     },
     "execution_count": 4,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# Sanity check -- expect (22544, 124): same column layout as the training frame\n",
     "dataset.kdd_test_2labels.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:59:23.827870Z",
     "start_time": "2017-07-23T23:59:23.092180Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(125973, 122)"
      ]
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "from sklearn import model_selection as ms\n",
     "from sklearn import preprocessing as pp\n",
     "\n",
     "class preprocess:\n",
     "    \"\"\"Namespace that splits features/labels and standardizes the features.\n",
     "\n",
     "    The StandardScaler is fit on the training features only and then applied\n",
     "    to both test sets, so no test statistics leak into the scaling.\n",
     "    \"\"\"\n",
     "    \n",
     "    output_columns_2labels = ['is_Normal','is_Attack']\n",
     "    \n",
     "    # Features = every column except the two one-hot label columns.\n",
     "    x_input = dataset.kdd_train_2labels.drop(output_columns_2labels, axis = 1)\n",
     "    y_output = dataset.kdd_train_2labels.loc[:,output_columns_2labels]\n",
     "\n",
     "    x_test_input = dataset.kdd_test_2labels.drop(output_columns_2labels, axis = 1)\n",
     "    y_test = dataset.kdd_test_2labels.loc[:,output_columns_2labels]\n",
     "    \n",
     "    # Trailing-underscore names refer to the second test set (kdd_test__2labels).\n",
     "    x_test__input = dataset.kdd_test__2labels.drop(output_columns_2labels, axis = 1)\n",
     "    y_test_ = dataset.kdd_test__2labels.loc[:,output_columns_2labels]\n",
     "\n",
     "    ss = pp.StandardScaler()\n",
     "\n",
     "    # Fit scaling on train only; reuse the same transform for both test sets.\n",
     "    x_train = ss.fit_transform(x_input)\n",
     "    x_test = ss.transform(x_test_input)\n",
     "    x_test_ = ss.transform(x_test__input)\n",
     "\n",
     "    # Plain numpy arrays for feeding TensorFlow placeholders later.\n",
     "    y_train = y_output.values\n",
     "    y_test = y_test.values  # NOTE: rebinds y_test from DataFrame to ndarray\n",
     "    y_test_ = y_test_.values\n",
     "\n",
     "preprocess.x_train.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:59:24.950551Z",
     "start_time": "2017-07-23T23:59:23.829426Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "import tensorflow as tf\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:59:25.139985Z",
     "start_time": "2017-07-23T23:59:24.952109Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "class network(object):\n",
    "    \n",
    "    input_dim = 122\n",
    "    classes = 2\n",
    "    hidden_encoder_dim = 122\n",
    "    hidden_layers = 1\n",
    "    latent_dim = 18\n",
    "\n",
    "    def __init__(self, classes, hidden_layers, num_of_features):\n",
    "        self.classes = classes\n",
    "        self.hidden_layers = hidden_layers\n",
    "        self.latent_dim = num_of_features\n",
    "            \n",
    "    def build_layers(self):\n",
    "        tf.reset_default_graph()\n",
    "        #learning_rate = tf.Variable(initial_value=0.001)\n",
    "\n",
    "        input_dim = self.input_dim\n",
    "        classes = self.classes\n",
    "        hidden_encoder_dim = self.hidden_encoder_dim\n",
    "        hidden_layers = self.hidden_layers\n",
    "        latent_dim = self.latent_dim\n",
    "        \n",
    "        with tf.variable_scope(\"Input\"):\n",
    "            self.x = tf.placeholder(\"float\", shape=[None, input_dim])\n",
    "            self.y_ = tf.placeholder(\"float\", shape=[None, classes])\n",
    "            self.keep_prob = tf.placeholder(\"float\")\n",
    "            self.lr = tf.placeholder(\"float\")\n",
    "        \n",
    "        with tf.variable_scope(\"Layer_Encoder\"):\n",
    "\n",
    "            hidden_encoder = tf.layers.dense(self.x, hidden_encoder_dim, activation = tf.nn.relu, kernel_regularizer=tf.nn.l2_loss)\n",
    "            hidden_encoder = tf.nn.dropout(hidden_encoder, self.keep_prob)\n",
    "            for h in range(hidden_layers - 1):\n",
    "                hidden_encoder = tf.layers.dense(hidden_encoder, latent_dim, activation = tf.nn.relu, kernel_regularizer=tf.nn.l2_loss)\n",
    "                hidden_encoder = tf.nn.dropout(hidden_encoder, self.keep_prob)\n",
    "            \n",
    "            #hidden_encoder = tf.layers.dense(self.x, latent_dim, activation = tf.nn.relu, kernel_regularizer=tf.nn.l2_loss)\n",
    "            #hidden_encoder = tf.nn.dropout(hidden_encoder, self.keep_prob)\n",
    "            \n",
    "        with tf.variable_scope(\"Layer_Dense_Softmax\"):\n",
    "            self.y = tf.layers.dense(hidden_encoder, classes, activation=tf.nn.softmax)\n",
    "            \n",
    "        with tf.variable_scope(\"Loss\"):\n",
    "            \n",
    "            loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(labels = self.y_, logits = self.y))\n",
    "\n",
    "            #loss = tf.clip_by_value(loss, -1e-1, 1e-1)\n",
    "            #loss = tf.where(tf.is_nan(loss), 1e-1, loss)\n",
    "            #loss = tf.where(tf.equal(loss, -1e-1), tf.random_normal(loss.shape), loss)\n",
    "            #loss = tf.where(tf.equal(loss, 1e-1), tf.random_normal(loss.shape), loss)\n",
    "            \n",
    "            self.regularized_loss = loss\n",
    "            correct_prediction = tf.equal(tf.argmax(self.y_, 1), tf.argmax(self.y, 1))\n",
    "            self.tf_accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32), name = \"Accuracy\")\n",
    "\n",
    "        with tf.variable_scope(\"Optimizer\"):\n",
    "            learning_rate=self.lr\n",
    "            optimizer = tf.train.AdamOptimizer(learning_rate)\n",
    "            gradients, variables = zip(*optimizer.compute_gradients(self.regularized_loss))\n",
    "            gradients = [\n",
    "                None if gradient is None else tf.clip_by_value(gradient, -1, 1)\n",
    "                for gradient in gradients]\n",
    "            self.train_op = optimizer.apply_gradients(zip(gradients, variables))\n",
    "            #self.train_op = optimizer.minimize(self.regularized_loss)\n",
    "            \n",
    "        # add op for merging summary\n",
    "        #self.summary_op = tf.summary.merge_all()\n",
    "        self.pred = tf.argmax(self.y, axis = 1)\n",
    "        self.actual = tf.argmax(self.y_, axis = 1)\n",
    "\n",
    "        # add Saver ops\n",
    "        self.saver = tf.train.Saver()\n",
    "        "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:59:25.427262Z",
     "start_time": "2017-07-23T23:59:25.141622Z"
    },
    "collapsed": true,
    "scrolled": false
   },
   "outputs": [],
   "source": [
    "import collections\n",
    "import time\n",
    "import sklearn.metrics as me \n",
    "\n",
    "class Train:    \n",
    "    \n",
    "    result = namedtuple(\"score\", ['epoch', 'no_of_features','hidden_layers','train_score', 'test_score', 'f1_score', 'test_score_20', 'f1_score_20', 'time_taken'])\n",
    "\n",
    "    predictions = {}\n",
    "    predictions_ = {}\n",
    "\n",
    "    results = []\n",
    "    \n",
    "    best_acc = 0\n",
    "    best_acc_global = 0\n",
    "    \n",
    "    def train(epochs, net, h,f, lrs):\n",
    "        batch_iterations = 200\n",
    "        train_loss = None\n",
    "        Train.best_acc = 0\n",
    "        os.makedirs(\"dataset/tf_dense_only_nsl_kdd/hidden layers_{}_features count_{}\".format(epochs,h,f),\n",
    "                    exist_ok = True)\n",
    "        with tf.Session() as sess:\n",
    "            #summary_writer_train = tf.summary.FileWriter('./logs/kdd/VAE/training', graph=sess.graph)\n",
    "            #summary_writer_valid = tf.summary.FileWriter('./logs/kdd/VAE/validation')\n",
    "\n",
    "            sess.run(tf.global_variables_initializer())\n",
    "            start_time = time.perf_counter()\n",
    "            for c, lr in enumerate(lrs):\n",
    "                for epoch in range(1, (epochs+1)):\n",
    "                    x_train, x_valid, y_train, y_valid, = ms.train_test_split(preprocess.x_train, \n",
    "                                                                              preprocess.y_train, \n",
    "                                                                              test_size=0.1)\n",
    "                    batch_indices = np.array_split(np.arange(x_train.shape[0]), \n",
    "                                               batch_iterations)\n",
    "\n",
    "                    for i in batch_indices:\n",
    "\n",
    "                        def train_batch():\n",
    "                            nonlocal train_loss\n",
    "                            _, train_loss = sess.run([net.train_op, \n",
    "                                                               net.regularized_loss, \n",
    "                                                               ], #net.summary_op\n",
    "                                                              feed_dict={net.x: x_train[i,:], \n",
    "                                                                         net.y_: y_train[i,:], \n",
    "                                                                         net.keep_prob:0.5, net.lr:lr})\n",
    "\n",
    "                        train_batch()\n",
    "                        #summary_writer_train.add_summary(summary_str, epoch)\n",
    "                        while((train_loss > 1e4 or np.isnan(train_loss)) and epoch > 1):\n",
    "                            print(\"Step {} | Training Loss: {:.6f}\".format(epoch, train_loss))\n",
    "                            net.saver.restore(sess, \n",
    "                                              tf.train.latest_checkpoint('dataset/tf_dense_only_nsl_kdd/hidden_layers_{}_features_count_{}'\n",
    "                                                                         .format(epochs,h,f)))\n",
    "                            train_batch()\n",
    "\n",
    "\n",
    "                    valid_accuracy = sess.run(net.tf_accuracy, #net.summary_op \n",
    "                                                          feed_dict={net.x: x_valid, \n",
    "                                                                     net.y_: y_valid, \n",
    "                                                                     net.keep_prob:1, net.lr:lr})\n",
    "                    #summary_writer_valid.add_summary(summary_str, epoch)\n",
    "\n",
    "\n",
    "                    accuracy, pred_value, actual_value, y_pred = sess.run([net.tf_accuracy, \n",
    "                                                                   net.pred, \n",
    "                                                                   net.actual, net.y], \n",
    "                                                                  feed_dict={net.x: preprocess.x_test, \n",
    "                                                                             net.y_: preprocess.y_test, \n",
    "                                                                             net.keep_prob:1, net.lr:lr})\n",
    "                    f1_score = me.f1_score(actual_value, pred_value)\n",
    "                    accuracy_, pred_value_, actual_value_, y_pred_ = sess.run([net.tf_accuracy, \n",
    "                                                                   net.pred, \n",
    "                                                                   net.actual, net.y], \n",
    "                                                                  feed_dict={net.x: preprocess.x_test_, \n",
    "                                                                             net.y_: preprocess.y_test_, \n",
    "                                                                             net.keep_prob:1, net.lr:lr})\n",
    "                    f1_score_ = me.f1_score(actual_value_, pred_value_)\n",
    "                    \n",
    "                    print(\"Step {} | Training Loss: {:.6f} | Validation Accuracy: {:.6f}\".format(epoch, train_loss, valid_accuracy))\n",
    "                    print(\"Accuracy on Test data: {}, {}\".format(accuracy, accuracy_))\n",
    "\n",
    "                    if accuracy > Train.best_acc_global:\n",
    "                        Train.best_acc_global = accuracy\n",
    "                        Train.pred_value = pred_value\n",
    "                        Train.actual_value = actual_value\n",
    "                        Train.pred_value_ = pred_value_\n",
    "                        Train.actual_value_ = actual_value_\n",
    "                        Train.best_parameters = \"Hidden Layers:{}, Features Count:{}\".format(h, f)\n",
    "\n",
    "                    if accuracy > Train.best_acc:\n",
    "                        Train.best_acc = accuracy\n",
    "\n",
    "                        if not (np.isnan(train_loss)):\n",
    "                            net.saver.save(sess, \n",
    "                                       \"dataset/tf_dense_only_nsl_kdd/hidden_layers_{}_features_count_{}\".format(h,f),\n",
    "                                        global_step = epochs)\n",
    "                        curr_pred = pd.DataFrame({\"Attack_prob\":y_pred[:,-2], \"Normal_prob\":y_pred[:, -1], \"Prediction\":pred_value, \"Actual\":actual_value})\n",
    "                        curr_pred_ = pd.DataFrame({\"Attack_prob\":y_pred_[:,-2], \"Normal_prob\":y_pred_[:, -1], \"Prediction\":pred_value_, \"Actual\": actual_value_})\n",
    "                        \n",
    "                        Train.predictions.update({\"{}_{}_{}\".format((epoch+1)*(c+1),f,h):(curr_pred, \n",
    "                                                   Train.result((epoch+1)*(c+1), f, h, valid_accuracy, accuracy, f1_score, accuracy_, f1_score_, time.perf_counter() - start_time))})\n",
    "                        Train.predictions_.update({\"{}_{}_{}\".format((epoch+1)*(c+1),f,h):(curr_pred_, \n",
    "                                                   Train.result((epoch+1)*(c+1), f, h, valid_accuracy, accuracy, f1_score, accuracy_, f1_score_, time.perf_counter() - start_time))})\n",
    "\n",
    "                        #Train.results.append(Train.result(epochs, f, h,valid_accuracy, accuracy))\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:59:25.521035Z",
     "start_time": "2017-07-23T23:59:25.428734Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "import itertools\n",
    "\n",
    "df_results = []\n",
    "past_scores = []\n",
    "\n",
    "class Hyperparameters:\n",
    "#    features_arr = [2, 4, 8, 16, 32, 64, 128, 256]\n",
    "#    hidden_layers_arr = [2, 4, 6, 10]\n",
    "\n",
    "    def start_training():\n",
    "        print(\"********************************** Training ******************************\")\n",
    "\n",
    "        global df_results\n",
    "        global past_scores\n",
    "        Train.predictions = {}\n",
    "        Train.predictions_ = {}\n",
    "\n",
    "        Train.results = []\n",
    "    \n",
    "        \n",
    "        features_arr = [1, 12, 24, 48, 122]\n",
    "        hidden_layers_arr = [1, 3]\n",
    "\n",
    "        epochs = [5]\n",
    "        lrs = [1e-5, 1e-6]\n",
    "        print(\"********************************** Entering Loop ******************************\")\n",
    "\n",
    "        for e, h, f in itertools.product(epochs, hidden_layers_arr, features_arr):\n",
    "            print(\"Current Layer Attributes - epochs:{} hidden layers:{} features count:{}\".format(e,h,f))\n",
    "            n = network(2,h,f)\n",
    "            n.build_layers()\n",
    "            Train.train(e, n, h,f, lrs)\n",
    "            \n",
    "        dict1 = {}\n",
    "        dict1_ = {}\n",
    "        dict2 = []\n",
    "\n",
    "        for k, (v1, v2) in Train.predictions.items():\n",
    "            dict1.update({k: v1})\n",
    "            dict2.append(v2)\n",
    "\n",
    "        for k, (v1_, v2) in Train.predictions_.items():\n",
    "            dict1_.update({k: v1_})\n",
    "\n",
    "        Train.predictions = dict1\n",
    "        Train.predictions_ = dict1_\n",
    "        \n",
    "        Train.results = dict2\n",
    "        df_results = pd.DataFrame(Train.results)\n",
    "\n",
    "        #temp = df_results.set_index(['no_of_features', 'hidden_layers'])\n",
    "\n",
    "        if not os.path.isfile('dataset/scores/tf_dense_only_nsl_kdd_scores_all.pkl'):\n",
    "            past_scores = df_results\n",
    "        else:\n",
    "            past_scores = pd.read_pickle(\"dataset/scores/tf_dense_only_nsl_kdd_scores_all.pkl\")\n",
    "            past_scores = past_scores.append(df_results, ignore_index=True)\n",
    "        past_scores.to_pickle(\"dataset/scores/tf_dense_only_nsl_kdd_scores_all.pkl\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-24T00:01:48.410581Z",
     "start_time": "2017-07-23T23:59:25.522514Z"
    },
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "********************************** Training ******************************\n",
      "********************************** Entering Loop ******************************\n",
      "Current Layer Attributes - epochs:5 hidden layers:1 features count:1\n",
      "Step 1 | Training Loss: 0.704092 | Validation Accuracy: 0.645896\n",
      "Accuracy on Test data: 0.6112934947013855, 0.33789029717445374\n",
      "Step 2 | Training Loss: 0.640290 | Validation Accuracy: 0.774488\n",
      "Accuracy on Test data: 0.690693736076355, 0.4293670952320099\n",
      "Step 3 | Training Loss: 0.607380 | Validation Accuracy: 0.851961\n",
      "Accuracy on Test data: 0.7224538922309875, 0.48219409584999084\n",
      "Step 4 | Training Loss: 0.557025 | Validation Accuracy: 0.880060\n",
      "Accuracy on Test data: 0.7379790544509888, 0.5088607668876648\n",
      "Step 5 | Training Loss: 0.513937 | Validation Accuracy: 0.913081\n",
      "Accuracy on Test data: 0.7531493902206421, 0.5367088317871094\n",
      "Step 1 | Training Loss: 0.517118 | Validation Accuracy: 0.920384\n",
      "Accuracy on Test data: 0.7543914318084717, 0.5390717387199402\n",
      "Step 2 | Training Loss: 0.510755 | Validation Accuracy: 0.925067\n",
      "Accuracy on Test data: 0.7553229331970215, 0.5401687622070312\n",
      "Step 3 | Training Loss: 0.513694 | Validation Accuracy: 0.929513\n",
      "Accuracy on Test data: 0.756609320640564, 0.5422784686088562\n",
      "Step 4 | Training Loss: 0.510859 | Validation Accuracy: 0.927846\n",
      "Accuracy on Test data: 0.7574964761734009, 0.5438818335533142\n",
      "Step 5 | Training Loss: 0.502903 | Validation Accuracy: 0.933561\n",
      "Accuracy on Test data: 0.7586497664451599, 0.5459915399551392\n",
      "Current Layer Attributes - epochs:5 hidden layers:1 features count:12\n",
      "Step 1 | Training Loss: 0.714985 | Validation Accuracy: 0.520003\n",
      "Accuracy on Test data: 0.5994943380355835, 0.645232081413269\n",
      "Step 2 | Training Loss: 0.660893 | Validation Accuracy: 0.698365\n",
      "Accuracy on Test data: 0.7581618428230286, 0.65139240026474\n",
      "Step 3 | Training Loss: 0.607649 | Validation Accuracy: 0.789729\n",
      "Accuracy on Test data: 0.8019428849220276, 0.6635442972183228\n",
      "Step 4 | Training Loss: 0.579113 | Validation Accuracy: 0.862121\n",
      "Accuracy on Test data: 0.819109320640564, 0.6725738644599915\n",
      "Step 5 | Training Loss: 0.532859 | Validation Accuracy: 0.881965\n",
      "Accuracy on Test data: 0.8294003009796143, 0.6893671154975891\n",
      "Step 1 | Training Loss: 0.537820 | Validation Accuracy: 0.885537\n",
      "Accuracy on Test data: 0.8301987051963806, 0.6908860802650452\n",
      "Step 2 | Training Loss: 0.527341 | Validation Accuracy: 0.886411\n",
      "Accuracy on Test data: 0.8308640718460083, 0.6921519041061401\n",
      "Step 3 | Training Loss: 0.515487 | Validation Accuracy: 0.888236\n",
      "Accuracy on Test data: 0.8317512273788452, 0.6937552690505981\n",
      "Step 4 | Training Loss: 0.518110 | Validation Accuracy: 0.896174\n",
      "Accuracy on Test data: 0.8324165940284729, 0.6949366927146912\n",
      "Step 5 | Training Loss: 0.511986 | Validation Accuracy: 0.897841\n",
      "Accuracy on Test data: 0.8333480954170227, 0.6966244578361511\n",
      "Current Layer Attributes - epochs:5 hidden layers:1 features count:24\n",
      "Step 1 | Training Loss: 0.691997 | Validation Accuracy: 0.625655\n",
      "Accuracy on Test data: 0.7745741605758667, 0.706244707107544\n",
      "Step 2 | Training Loss: 0.638065 | Validation Accuracy: 0.740753\n",
      "Accuracy on Test data: 0.8202182650566101, 0.7161181569099426\n",
      "Step 3 | Training Loss: 0.590750 | Validation Accuracy: 0.856565\n",
      "Accuracy on Test data: 0.8501153588294983, 0.7293670773506165\n",
      "Step 4 | Training Loss: 0.555348 | Validation Accuracy: 0.892046\n",
      "Accuracy on Test data: 0.85801100730896, 0.7402531504631042\n",
      "Step 5 | Training Loss: 0.521922 | Validation Accuracy: 0.901810\n",
      "Accuracy on Test data: 0.8670599460601807, 0.7554430365562439\n",
      "Step 1 | Training Loss: 0.521976 | Validation Accuracy: 0.907049\n",
      "Accuracy on Test data: 0.867237389087677, 0.755696177482605\n",
      "Step 2 | Training Loss: 0.534170 | Validation Accuracy: 0.912208\n",
      "Accuracy on Test data: 0.866084098815918, 0.7535021305084229\n",
      "Step 3 | Training Loss: 0.515956 | Validation Accuracy: 0.914590\n",
      "Accuracy on Test data: 0.8597853183746338, 0.7413502335548401\n",
      "Step 4 | Training Loss: 0.529092 | Validation Accuracy: 0.915860\n",
      "Accuracy on Test data: 0.8555269837379456, 0.7332489490509033\n",
      "Step 5 | Training Loss: 0.512805 | Validation Accuracy: 0.919511\n",
      "Accuracy on Test data: 0.851091206073761, 0.7248101234436035\n",
      "Current Layer Attributes - epochs:5 hidden layers:1 features count:48\n",
      "Step 1 | Training Loss: 0.629006 | Validation Accuracy: 0.754961\n",
      "Accuracy on Test data: 0.7967973947525024, 0.7071729898452759\n",
      "Step 2 | Training Loss: 0.592226 | Validation Accuracy: 0.848389\n",
      "Accuracy on Test data: 0.8427075743675232, 0.7191561460494995\n",
      "Step 3 | Training Loss: 0.548871 | Validation Accuracy: 0.890776\n",
      "Accuracy on Test data: 0.8444375395774841, 0.7143459916114807\n",
      "Step 4 | Training Loss: 0.507608 | Validation Accuracy: 0.909192\n",
      "Accuracy on Test data: 0.8459900617599487, 0.7146835327148438\n",
      "Step 5 | Training Loss: 0.494673 | Validation Accuracy: 0.913796\n",
      "Accuracy on Test data: 0.8472321033477783, 0.7156118154525757\n",
      "Step 1 | Training Loss: 0.471714 | Validation Accuracy: 0.915701\n",
      "Accuracy on Test data: 0.8462561964988708, 0.7137552499771118\n",
      "Step 2 | Training Loss: 0.485304 | Validation Accuracy: 0.915860\n",
      "Accuracy on Test data: 0.8435060381889343, 0.7085232138633728\n",
      "Step 3 | Training Loss: 0.469505 | Validation Accuracy: 0.920702\n",
      "Accuracy on Test data: 0.843018114566803, 0.7075949311256409\n",
      "Step 4 | Training Loss: 0.469571 | Validation Accuracy: 0.917288\n",
      "Accuracy on Test data: 0.8431511521339417, 0.7077637314796448\n",
      "Step 5 | Training Loss: 0.470555 | Validation Accuracy: 0.924909\n",
      "Accuracy on Test data: 0.8429293632507324, 0.7069198489189148\n",
      "Current Layer Attributes - epochs:5 hidden layers:1 features count:122\n",
      "Step 1 | Training Loss: 0.675309 | Validation Accuracy: 0.629147\n",
      "Accuracy on Test data: 0.6239354014396667, 0.6817721724510193\n",
      "Step 2 | Training Loss: 0.612087 | Validation Accuracy: 0.753929\n",
      "Accuracy on Test data: 0.8024308085441589, 0.7309704422950745\n",
      "Step 3 | Training Loss: 0.593459 | Validation Accuracy: 0.829179\n",
      "Accuracy on Test data: 0.8449254631996155, 0.7784810066223145\n",
      "Step 4 | Training Loss: 0.545930 | Validation Accuracy: 0.901572\n",
      "Accuracy on Test data: 0.8462561964988708, 0.7356961965560913\n",
      "Step 5 | Training Loss: 0.515321 | Validation Accuracy: 0.919114\n",
      "Accuracy on Test data: 0.8518452644348145, 0.7337552905082703\n",
      "Step 1 | Training Loss: 0.499849 | Validation Accuracy: 0.916574\n",
      "Accuracy on Test data: 0.8523775935173035, 0.7337552905082703\n",
      "Step 2 | Training Loss: 0.503184 | Validation Accuracy: 0.914193\n",
      "Accuracy on Test data: 0.8522888422012329, 0.7330801486968994\n",
      "Step 3 | Training Loss: 0.490915 | Validation Accuracy: 0.925067\n",
      "Accuracy on Test data: 0.8513573408126831, 0.7312236428260803\n",
      "Step 4 | Training Loss: 0.481220 | Validation Accuracy: 0.925067\n",
      "Accuracy on Test data: 0.8463449478149414, 0.7214345932006836\n",
      "Step 5 | Training Loss: 0.497714 | Validation Accuracy: 0.921972\n",
      "Accuracy on Test data: 0.8456795811653137, 0.7198312282562256\n",
      "Current Layer Attributes - epochs:5 hidden layers:3 features count:1\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/ritesh_malaiya/anaconda3/envs/p3/lib/python3.6/site-packages/sklearn/metrics/classification.py:1113: UndefinedMetricWarning: F-score is ill-defined and being set to 0.0 due to no predicted samples.\n",
      "  'precision', 'predicted', average, warn_for)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Step 1 | Training Loss: 0.693097 | Validation Accuracy: 0.529925\n",
      "Accuracy on Test data: 0.43075764179229736, 0.18160337209701538\n",
      "Step 2 | Training Loss: 0.693101 | Validation Accuracy: 0.533021\n",
      "Accuracy on Test data: 0.43075764179229736, 0.18160337209701538\n",
      "Step 3 | Training Loss: 0.693125 | Validation Accuracy: 0.537625\n",
      "Accuracy on Test data: 0.43075764179229736, 0.18160337209701538\n",
      "Step 4 | Training Loss: 0.692802 | Validation Accuracy: 0.536276\n",
      "Accuracy on Test data: 0.43075764179229736, 0.18160337209701538\n",
      "Step 5 | Training Loss: 0.692893 | Validation Accuracy: 0.531513\n",
      "Accuracy on Test data: 0.43075764179229736, 0.18160337209701538\n",
      "Step 1 | Training Loss: 0.692826 | Validation Accuracy: 0.539609\n",
      "Accuracy on Test data: 0.43075764179229736, 0.18160337209701538\n",
      "Step 2 | Training Loss: 0.692912 | Validation Accuracy: 0.530402\n",
      "Accuracy on Test data: 0.43075764179229736, 0.18160337209701538\n",
      "Step 3 | Training Loss: 0.692909 | Validation Accuracy: 0.535482\n",
      "Accuracy on Test data: 0.43075764179229736, 0.18160337209701538\n",
      "Step 4 | Training Loss: 0.692509 | Validation Accuracy: 0.532783\n",
      "Accuracy on Test data: 0.43075764179229736, 0.18160337209701538\n",
      "Step 5 | Training Loss: 0.692771 | Validation Accuracy: 0.538260\n",
      "Accuracy on Test data: 0.43075764179229736, 0.18160337209701538\n",
      "Current Layer Attributes - epochs:5 hidden layers:3 features count:12\n",
      "Step 1 | Training Loss: 0.747871 | Validation Accuracy: 0.302270\n",
      "Accuracy on Test data: 0.49574166536331177, 0.753333330154419\n",
      "Step 2 | Training Loss: 0.705273 | Validation Accuracy: 0.346563\n",
      "Accuracy on Test data: 0.5076295137405396, 0.7574683427810669\n",
      "Step 3 | Training Loss: 0.705840 | Validation Accuracy: 0.590252\n",
      "Accuracy on Test data: 0.6894517540931702, 0.7579746842384338\n",
      "Step 4 | Training Loss: 0.704902 | Validation Accuracy: 0.745753\n",
      "Accuracy on Test data: 0.8082416653633118, 0.7280168533325195\n",
      "Step 5 | Training Loss: 0.665448 | Validation Accuracy: 0.796237\n",
      "Accuracy on Test data: 0.828025221824646, 0.7273417711257935\n",
      "Step 1 | Training Loss: 0.662826 | Validation Accuracy: 0.794491\n",
      "Accuracy on Test data: 0.8286905884742737, 0.7275949120521545\n",
      "Step 2 | Training Loss: 0.678499 | Validation Accuracy: 0.797746\n",
      "Accuracy on Test data: 0.8290454149246216, 0.7281012535095215\n",
      "Step 3 | Training Loss: 0.667243 | Validation Accuracy: 0.801000\n",
      "Accuracy on Test data: 0.8294003009796143, 0.7286075949668884\n",
      "Step 4 | Training Loss: 0.652857 | Validation Accuracy: 0.797508\n",
      "Accuracy on Test data: 0.8300212621688843, 0.7297890186309814\n",
      "Step 5 | Training Loss: 0.681853 | Validation Accuracy: 0.801873\n",
      "Accuracy on Test data: 0.8305092453956604, 0.7302109599113464\n",
      "Current Layer Attributes - epochs:5 hidden layers:3 features count:24\n",
      "Step 1 | Training Loss: 0.688986 | Validation Accuracy: 0.804969\n",
      "Accuracy on Test data: 0.7739975452423096, 0.6429535746574402\n",
      "Step 2 | Training Loss: 0.641489 | Validation Accuracy: 0.868471\n",
      "Accuracy on Test data: 0.8045599460601807, 0.6561181545257568\n",
      "Step 3 | Training Loss: 0.619636 | Validation Accuracy: 0.890776\n",
      "Accuracy on Test data: 0.8147178888320923, 0.6610126495361328\n",
      "Step 4 | Training Loss: 0.607743 | Validation Accuracy: 0.907287\n",
      "Accuracy on Test data: 0.8205287456512451, 0.6669198274612427\n",
      "Step 5 | Training Loss: 0.615180 | Validation Accuracy: 0.915066\n",
      "Accuracy on Test data: 0.8207948803901672, 0.6650632619857788\n",
      "Step 1 | Training Loss: 0.602186 | Validation Accuracy: 0.910462\n",
      "Accuracy on Test data: 0.8219038248062134, 0.6671729683876038\n",
      "Step 2 | Training Loss: 0.597917 | Validation Accuracy: 0.915860\n",
      "Accuracy on Test data: 0.8218594789505005, 0.6670886278152466\n",
      "Step 3 | Training Loss: 0.589698 | Validation Accuracy: 0.917050\n",
      "Accuracy on Test data: 0.8219038248062134, 0.6670886278152466\n",
      "Step 4 | Training Loss: 0.604934 | Validation Accuracy: 0.918955\n",
      "Accuracy on Test data: 0.821726381778717, 0.6667510271072388\n",
      "Step 5 | Training Loss: 0.590326 | Validation Accuracy: 0.922527\n",
      "Accuracy on Test data: 0.8216376900672913, 0.6664978861808777\n",
      "Current Layer Attributes - epochs:5 hidden layers:3 features count:48\n",
      "Step 1 | Training Loss: 0.671034 | Validation Accuracy: 0.815288\n",
      "Accuracy on Test data: 0.6449165940284729, 0.4879325032234192\n",
      "Step 2 | Training Loss: 0.650770 | Validation Accuracy: 0.899428\n",
      "Accuracy on Test data: 0.799237072467804, 0.6342616081237793\n",
      "Step 3 | Training Loss: 0.627489 | Validation Accuracy: 0.926179\n",
      "Accuracy on Test data: 0.8395581841468811, 0.7070042490959167\n",
      "Step 4 | Training Loss: 0.571884 | Validation Accuracy: 0.925226\n",
      "Accuracy on Test data: 0.8531316518783569, 0.7314767837524414\n",
      "Step 5 | Training Loss: 0.572623 | Validation Accuracy: 0.922289\n",
      "Accuracy on Test data: 0.8635557293891907, 0.751139223575592\n",
      "Step 1 | Training Loss: 0.591467 | Validation Accuracy: 0.926973\n",
      "Accuracy on Test data: 0.8635113835334778, 0.7510548233985901\n",
      "Step 2 | Training Loss: 0.535877 | Validation Accuracy: 0.926258\n",
      "Accuracy on Test data: 0.8661284446716309, 0.7560337781906128\n",
      "Step 3 | Training Loss: 0.565682 | Validation Accuracy: 0.926258\n",
      "Accuracy on Test data: 0.8664389848709106, 0.756540060043335\n",
      "Step 4 | Training Loss: 0.545241 | Validation Accuracy: 0.923797\n",
      "Accuracy on Test data: 0.8666163682937622, 0.7568776607513428\n",
      "Step 5 | Training Loss: 0.536672 | Validation Accuracy: 0.924750\n",
      "Accuracy on Test data: 0.8669712543487549, 0.7575527429580688\n",
      "Current Layer Attributes - epochs:5 hidden layers:3 features count:122\n",
      "Step 1 | Training Loss: 0.674893 | Validation Accuracy: 0.746150\n",
      "Accuracy on Test data: 0.8286018371582031, 0.7761181592941284\n",
      "Step 2 | Training Loss: 0.617569 | Validation Accuracy: 0.860137\n",
      "Accuracy on Test data: 0.8515791296958923, 0.7477636933326721\n",
      "Step 3 | Training Loss: 0.579240 | Validation Accuracy: 0.899032\n",
      "Accuracy on Test data: 0.8380944132804871, 0.7081012725830078\n",
      "Step 4 | Training Loss: 0.567790 | Validation Accuracy: 0.907366\n",
      "Accuracy on Test data: 0.8240330219268799, 0.6759493947029114\n",
      "Step 5 | Training Loss: 0.521153 | Validation Accuracy: 0.915304\n",
      "Accuracy on Test data: 0.8080198764801025, 0.6442193984985352\n",
      "Step 1 | Training Loss: 0.494850 | Validation Accuracy: 0.913796\n",
      "Accuracy on Test data: 0.8050479292869568, 0.6383122205734253\n",
      "Step 2 | Training Loss: 0.513884 | Validation Accuracy: 0.914193\n",
      "Accuracy on Test data: 0.8018985390663147, 0.6323207020759583\n",
      "Step 3 | Training Loss: 0.502671 | Validation Accuracy: 0.911732\n",
      "Accuracy on Test data: 0.7997693419456482, 0.6282700300216675\n",
      "Step 4 | Training Loss: 0.508571 | Validation Accuracy: 0.918797\n",
      "Accuracy on Test data: 0.7975514531135559, 0.6240506172180176\n",
      "Step 5 | Training Loss: 0.499664 | Validation Accuracy: 0.910541\n",
      "Accuracy on Test data: 0.7956440448760986, 0.6202531456947327\n"
     ]
    }
   ],
   "source": [
    "#%%timeit -r 10\n",
    "#capture\n",
    "Hyperparameters.start_training()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-06-18T20:12:01.591604Z",
     "start_time": "2017-06-18T20:12:01.586451Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-24T00:01:48.414452Z",
     "start_time": "2017-07-24T00:01:48.412069Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "#g = df_results.groupby(by=['no_of_features'])\n",
    "#idx = g['test_score'].transform(max) == df_results['test_score']\n",
    "#df_results[idx].sort_values(by = 'test_score', ascending = False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-24T00:01:48.422358Z",
     "start_time": "2017-07-24T00:01:48.415783Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "#g = df_results.groupby(by=['no_of_features'])\n",
    "#idx = g['test_score_20'].transform(max) == df_results['test_score_20']\n",
    "#df_results[idx].sort_values(by = 'test_score_20', ascending = False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-24T00:01:48.427692Z",
     "start_time": "2017-07-24T00:01:48.423680Z"
    },
    "collapsed": true,
    "scrolled": true
   },
   "outputs": [],
   "source": [
    "#df_results.sort_values(by = 'test_score', ascending = False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-24T00:01:48.431797Z",
     "start_time": "2017-07-24T00:01:48.429059Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "#Train.predictions_"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-24T00:01:48.823384Z",
     "start_time": "2017-07-24T00:01:48.433146Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Persist per-run predictions and the results table for later analysis.\n",
    "# NOTE(review): pd.Panel is deprecated (pandas >= 0.20) and removed in\n",
    "# pandas 1.0 -- when upgrading pandas, store these as a dict of DataFrames\n",
    "# (or a MultiIndex DataFrame) instead; note that would also change the\n",
    "# pickle format read by any downstream notebook.\n",
    "pd.Panel(Train.predictions).to_pickle(\"dataset/tf_dense_only_nsl_kdd_predictions.pkl\")\n",
    "pd.Panel(Train.predictions_).to_pickle(\"dataset/tf_dense_only_nsl_kdd_predictions__.pkl\")\n",
    "\n",
    "df_results.to_pickle(\"dataset/tf_dense_only_nsl_kdd_scores.pkl\")\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-24T00:01:48.897431Z",
     "start_time": "2017-07-24T00:01:48.824977Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "import itertools\n",
    "\n",
    "def plot_confusion_matrix(cm, classes,\n",
    "                          normalize=False,\n",
    "                          title='Confusion matrix',\n",
    "                          cmap=plt.cm.Blues):\n",
    "    \"\"\"\n",
    "    Draw `cm` (a 2x2 confusion matrix) as an annotated heatmap.\n",
    "    Normalization can be applied by setting `normalize=True`.\n",
    "\n",
    "    Fix: normalization now happens *before* the matrix is drawn, so the\n",
    "    heatmap colours, the text-colour threshold and the cell annotations\n",
    "    all use the same values.  Previously `plt.imshow` showed the raw\n",
    "    counts while the annotations showed row-normalized rates.\n",
    "    \"\"\"\n",
    "    np.set_printoptions(precision=4)\n",
    "\n",
    "    if normalize:\n",
    "        # Row-normalize: each row (one true class) sums to 1.\n",
    "        cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]\n",
    "\n",
    "    plt.imshow(cm, interpolation='nearest', cmap=cmap)\n",
    "    plt.title(title)\n",
    "    plt.colorbar()\n",
    "    tick_marks = np.arange(len(classes))\n",
    "    plt.xticks(tick_marks, classes, rotation=45)\n",
    "    plt.yticks(tick_marks, classes)\n",
    "\n",
    "    # Quadrant names for the binary (Normal vs Attack) layout.\n",
    "    label = [[\"\\n True Negative\", \"\\n False Positive \\n Type II Error\"],\n",
    "             [\"\\n False Negative \\n Type I Error\", \"\\n True Positive\"]\n",
    "            ]\n",
    "\n",
    "    # White text on dark cells, black text on light cells.\n",
    "    thresh = cm.max() / 2.\n",
    "    for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):\n",
    "        plt.text(j, i, \"{} {}\".format(cm[i, j].round(4), label[i][j]),\n",
    "                 horizontalalignment=\"center\",\n",
    "                 color=\"white\" if cm[i, j] > thresh else \"black\")\n",
    "\n",
    "    plt.tight_layout()\n",
    "    plt.ylabel('True label')\n",
    "    plt.xlabel('Predicted label')\n",
    "\n",
    "def plot(actual_value, pred_value):\n",
    "    \"\"\"Confusion-matrix figure for the 2-label (Normal/Attack) task.\"\"\"\n",
    "    from sklearn.metrics import confusion_matrix\n",
    "\n",
    "    cm_2labels = confusion_matrix(y_pred = pred_value, y_true = actual_value)\n",
    "    plt.figure(figsize=[6,6])\n",
    "    plot_confusion_matrix(cm_2labels, ['Normal', 'Attack'], normalize = False)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-24T00:01:48.945091Z",
     "start_time": "2017-07-24T00:01:48.898843Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "#plot(actual_value = Train.actual_value, pred_value = Train.pred_value)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-24T00:01:48.952216Z",
     "start_time": "2017-07-24T00:01:48.946613Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "#plot(actual_value = Train.actual_value_, pred_value = Train.pred_value_)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-24T00:01:48.959244Z",
     "start_time": "2017-07-24T00:01:48.953604Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Load accumulated scores from previous runs.  The bash cell near the top\n",
    "# of the notebook deletes exactly this file, so guard the read to keep a\n",
    "# Restart-&-Run-All from crashing with FileNotFoundError on a fresh run\n",
    "# (`os` is imported in the first cell).\n",
    "score_path = \"dataset/scores/tf_dense_only_nsl_kdd_scores_all.pkl\"\n",
    "if os.path.exists(score_path):\n",
    "    past_scores = pd.read_pickle(score_path)\n",
    "else:\n",
    "    past_scores = pd.DataFrame()\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-24T00:01:49.010387Z",
     "start_time": "2017-07-24T00:01:48.960670Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>epoch</th>\n",
       "      <th>f1_score</th>\n",
       "      <th>f1_score_20</th>\n",
       "      <th>hidden_layers</th>\n",
       "      <th>no_of_features</th>\n",
       "      <th>test_score</th>\n",
       "      <th>test_score_20</th>\n",
       "      <th>time_taken</th>\n",
       "      <th>train_score</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>1110</th>\n",
       "      <td>12</td>\n",
       "      <td>0.873902</td>\n",
       "      <td>0.834989</td>\n",
       "      <td>3.0</td>\n",
       "      <td>48.0</td>\n",
       "      <td>0.866971</td>\n",
       "      <td>0.757553</td>\n",
       "      <td>18.979939</td>\n",
       "      <td>0.924750</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1109</th>\n",
       "      <td>10</td>\n",
       "      <td>0.873501</td>\n",
       "      <td>0.834416</td>\n",
       "      <td>3.0</td>\n",
       "      <td>48.0</td>\n",
       "      <td>0.866616</td>\n",
       "      <td>0.756878</td>\n",
       "      <td>17.183680</td>\n",
       "      <td>0.923797</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1108</th>\n",
       "      <td>8</td>\n",
       "      <td>0.873269</td>\n",
       "      <td>0.834071</td>\n",
       "      <td>3.0</td>\n",
       "      <td>48.0</td>\n",
       "      <td>0.866439</td>\n",
       "      <td>0.756540</td>\n",
       "      <td>15.182953</td>\n",
       "      <td>0.926258</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1107</th>\n",
       "      <td>6</td>\n",
       "      <td>0.872948</td>\n",
       "      <td>0.833669</td>\n",
       "      <td>3.0</td>\n",
       "      <td>48.0</td>\n",
       "      <td>0.866128</td>\n",
       "      <td>0.756034</td>\n",
       "      <td>13.206024</td>\n",
       "      <td>0.926258</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1076</th>\n",
       "      <td>4</td>\n",
       "      <td>0.872752</td>\n",
       "      <td>0.831559</td>\n",
       "      <td>1.0</td>\n",
       "      <td>24.0</td>\n",
       "      <td>0.867237</td>\n",
       "      <td>0.755696</td>\n",
       "      <td>6.615212</td>\n",
       "      <td>0.907049</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1078</th>\n",
       "      <td>6</td>\n",
       "      <td>0.872539</td>\n",
       "      <td>0.831315</td>\n",
       "      <td>1.0</td>\n",
       "      <td>24.0</td>\n",
       "      <td>0.867060</td>\n",
       "      <td>0.755443</td>\n",
       "      <td>5.527226</td>\n",
       "      <td>0.901810</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1112</th>\n",
       "      <td>3</td>\n",
       "      <td>0.868402</td>\n",
       "      <td>0.841002</td>\n",
       "      <td>3.0</td>\n",
       "      <td>122.0</td>\n",
       "      <td>0.851579</td>\n",
       "      <td>0.747764</td>\n",
       "      <td>4.354387</td>\n",
       "      <td>0.860137</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1086</th>\n",
       "      <td>4</td>\n",
       "      <td>0.862763</td>\n",
       "      <td>0.823989</td>\n",
       "      <td>1.0</td>\n",
       "      <td>122.0</td>\n",
       "      <td>0.852378</td>\n",
       "      <td>0.733755</td>\n",
       "      <td>7.787664</td>\n",
       "      <td>0.916574</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1077</th>\n",
       "      <td>5</td>\n",
       "      <td>0.862754</td>\n",
       "      <td>0.818941</td>\n",
       "      <td>1.0</td>\n",
       "      <td>24.0</td>\n",
       "      <td>0.858011</td>\n",
       "      <td>0.740253</td>\n",
       "      <td>4.420664</td>\n",
       "      <td>0.892046</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1088</th>\n",
       "      <td>6</td>\n",
       "      <td>0.862359</td>\n",
       "      <td>0.824165</td>\n",
       "      <td>1.0</td>\n",
       "      <td>122.0</td>\n",
       "      <td>0.851845</td>\n",
       "      <td>0.733755</td>\n",
       "      <td>6.520284</td>\n",
       "      <td>0.919114</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1111</th>\n",
       "      <td>2</td>\n",
       "      <td>0.861654</td>\n",
       "      <td>0.870263</td>\n",
       "      <td>3.0</td>\n",
       "      <td>122.0</td>\n",
       "      <td>0.828602</td>\n",
       "      <td>0.776118</td>\n",
       "      <td>2.195020</td>\n",
       "      <td>0.746150</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1087</th>\n",
       "      <td>5</td>\n",
       "      <td>0.858542</td>\n",
       "      <td>0.827095</td>\n",
       "      <td>1.0</td>\n",
       "      <td>122.0</td>\n",
       "      <td>0.846256</td>\n",
       "      <td>0.735696</td>\n",
       "      <td>5.203199</td>\n",
       "      <td>0.901572</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1106</th>\n",
       "      <td>5</td>\n",
       "      <td>0.858122</td>\n",
       "      <td>0.812448</td>\n",
       "      <td>3.0</td>\n",
       "      <td>48.0</td>\n",
       "      <td>0.853132</td>\n",
       "      <td>0.731477</td>\n",
       "      <td>7.784258</td>\n",
       "      <td>0.925226</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1097</th>\n",
       "      <td>12</td>\n",
       "      <td>0.857121</td>\n",
       "      <td>0.839242</td>\n",
       "      <td>3.0</td>\n",
       "      <td>12.0</td>\n",
       "      <td>0.830509</td>\n",
       "      <td>0.730211</td>\n",
       "      <td>16.995091</td>\n",
       "      <td>0.801873</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1096</th>\n",
       "      <td>10</td>\n",
       "      <td>0.856716</td>\n",
       "      <td>0.838982</td>\n",
       "      <td>3.0</td>\n",
       "      <td>12.0</td>\n",
       "      <td>0.830021</td>\n",
       "      <td>0.729789</td>\n",
       "      <td>15.335974</td>\n",
       "      <td>0.797508</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1095</th>\n",
       "      <td>8</td>\n",
       "      <td>0.856138</td>\n",
       "      <td>0.838197</td>\n",
       "      <td>3.0</td>\n",
       "      <td>12.0</td>\n",
       "      <td>0.829400</td>\n",
       "      <td>0.728608</td>\n",
       "      <td>13.734848</td>\n",
       "      <td>0.801000</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1094</th>\n",
       "      <td>6</td>\n",
       "      <td>0.855882</td>\n",
       "      <td>0.837944</td>\n",
       "      <td>3.0</td>\n",
       "      <td>12.0</td>\n",
       "      <td>0.829045</td>\n",
       "      <td>0.728101</td>\n",
       "      <td>12.027643</td>\n",
       "      <td>0.797746</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1092</th>\n",
       "      <td>4</td>\n",
       "      <td>0.855637</td>\n",
       "      <td>0.837724</td>\n",
       "      <td>3.0</td>\n",
       "      <td>12.0</td>\n",
       "      <td>0.828691</td>\n",
       "      <td>0.727595</td>\n",
       "      <td>10.303682</td>\n",
       "      <td>0.794491</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1083</th>\n",
       "      <td>6</td>\n",
       "      <td>0.854733</td>\n",
       "      <td>0.806544</td>\n",
       "      <td>1.0</td>\n",
       "      <td>48.0</td>\n",
       "      <td>0.847232</td>\n",
       "      <td>0.715612</td>\n",
       "      <td>5.875551</td>\n",
       "      <td>0.913796</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1082</th>\n",
       "      <td>5</td>\n",
       "      <td>0.853477</td>\n",
       "      <td>0.805835</td>\n",
       "      <td>1.0</td>\n",
       "      <td>48.0</td>\n",
       "      <td>0.845990</td>\n",
       "      <td>0.714684</td>\n",
       "      <td>4.594229</td>\n",
       "      <td>0.909192</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1080</th>\n",
       "      <td>3</td>\n",
       "      <td>0.852312</td>\n",
       "      <td>0.811743</td>\n",
       "      <td>1.0</td>\n",
       "      <td>48.0</td>\n",
       "      <td>0.842708</td>\n",
       "      <td>0.719156</td>\n",
       "      <td>2.295216</td>\n",
       "      <td>0.848389</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1081</th>\n",
       "      <td>4</td>\n",
       "      <td>0.852032</td>\n",
       "      <td>0.805783</td>\n",
       "      <td>1.0</td>\n",
       "      <td>48.0</td>\n",
       "      <td>0.844438</td>\n",
       "      <td>0.714346</td>\n",
       "      <td>3.419154</td>\n",
       "      <td>0.890776</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1105</th>\n",
       "      <td>4</td>\n",
       "      <td>0.842595</td>\n",
       "      <td>0.791095</td>\n",
       "      <td>3.0</td>\n",
       "      <td>48.0</td>\n",
       "      <td>0.839558</td>\n",
       "      <td>0.707004</td>\n",
       "      <td>5.737464</td>\n",
       "      <td>0.926179</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1093</th>\n",
       "      <td>5</td>\n",
       "      <td>0.841468</td>\n",
       "      <td>0.838503</td>\n",
       "      <td>3.0</td>\n",
       "      <td>12.0</td>\n",
       "      <td>0.808242</td>\n",
       "      <td>0.728017</td>\n",
       "      <td>6.960655</td>\n",
       "      <td>0.745753</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1073</th>\n",
       "      <td>12</td>\n",
       "      <td>0.837183</td>\n",
       "      <td>0.785078</td>\n",
       "      <td>1.0</td>\n",
       "      <td>12.0</td>\n",
       "      <td>0.833348</td>\n",
       "      <td>0.696624</td>\n",
       "      <td>10.911904</td>\n",
       "      <td>0.897841</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1072</th>\n",
       "      <td>10</td>\n",
       "      <td>0.836167</td>\n",
       "      <td>0.783675</td>\n",
       "      <td>1.0</td>\n",
       "      <td>12.0</td>\n",
       "      <td>0.832417</td>\n",
       "      <td>0.694937</td>\n",
       "      <td>9.859136</td>\n",
       "      <td>0.896174</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1071</th>\n",
       "      <td>8</td>\n",
       "      <td>0.835452</td>\n",
       "      <td>0.782708</td>\n",
       "      <td>1.0</td>\n",
       "      <td>12.0</td>\n",
       "      <td>0.831751</td>\n",
       "      <td>0.693755</td>\n",
       "      <td>8.743644</td>\n",
       "      <td>0.888236</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1085</th>\n",
       "      <td>3</td>\n",
       "      <td>0.835378</td>\n",
       "      <td>0.838909</td>\n",
       "      <td>1.0</td>\n",
       "      <td>122.0</td>\n",
       "      <td>0.802431</td>\n",
       "      <td>0.730970</td>\n",
       "      <td>2.609813</td>\n",
       "      <td>0.753929</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1070</th>\n",
       "      <td>6</td>\n",
       "      <td>0.834512</td>\n",
       "      <td>0.781426</td>\n",
       "      <td>1.0</td>\n",
       "      <td>12.0</td>\n",
       "      <td>0.830864</td>\n",
       "      <td>0.692152</td>\n",
       "      <td>7.700856</td>\n",
       "      <td>0.886411</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1068</th>\n",
       "      <td>4</td>\n",
       "      <td>0.833782</td>\n",
       "      <td>0.780383</td>\n",
       "      <td>1.0</td>\n",
       "      <td>12.0</td>\n",
       "      <td>0.830199</td>\n",
       "      <td>0.690886</td>\n",
       "      <td>6.587520</td>\n",
       "      <td>0.885537</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1100</th>\n",
       "      <td>4</td>\n",
       "      <td>0.826648</td>\n",
       "      <td>0.765712</td>\n",
       "      <td>3.0</td>\n",
       "      <td>24.0</td>\n",
       "      <td>0.821904</td>\n",
       "      <td>0.667173</td>\n",
       "      <td>10.274888</td>\n",
       "      <td>0.910462</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1102</th>\n",
       "      <td>6</td>\n",
       "      <td>0.825365</td>\n",
       "      <td>0.763848</td>\n",
       "      <td>3.0</td>\n",
       "      <td>24.0</td>\n",
       "      <td>0.820795</td>\n",
       "      <td>0.665063</td>\n",
       "      <td>8.518722</td>\n",
       "      <td>0.915066</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1101</th>\n",
       "      <td>5</td>\n",
       "      <td>0.825287</td>\n",
       "      <td>0.765437</td>\n",
       "      <td>3.0</td>\n",
       "      <td>24.0</td>\n",
       "      <td>0.820529</td>\n",
       "      <td>0.666920</td>\n",
       "      <td>6.885301</td>\n",
       "      <td>0.907287</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1069</th>\n",
       "      <td>5</td>\n",
       "      <td>0.822696</td>\n",
       "      <td>0.766715</td>\n",
       "      <td>1.0</td>\n",
       "      <td>12.0</td>\n",
       "      <td>0.819109</td>\n",
       "      <td>0.672574</td>\n",
       "      <td>4.404348</td>\n",
       "      <td>0.862121</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1075</th>\n",
       "      <td>3</td>\n",
       "      <td>0.822057</td>\n",
       "      <td>0.800522</td>\n",
       "      <td>1.0</td>\n",
       "      <td>24.0</td>\n",
       "      <td>0.820218</td>\n",
       "      <td>0.716118</td>\n",
       "      <td>2.220499</td>\n",
       "      <td>0.740753</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1079</th>\n",
       "      <td>2</td>\n",
       "      <td>0.820515</td>\n",
       "      <td>0.810775</td>\n",
       "      <td>1.0</td>\n",
       "      <td>48.0</td>\n",
       "      <td>0.796797</td>\n",
       "      <td>0.707173</td>\n",
       "      <td>1.140302</td>\n",
       "      <td>0.754961</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1099</th>\n",
       "      <td>3</td>\n",
       "      <td>0.811693</td>\n",
       "      <td>0.758604</td>\n",
       "      <td>3.0</td>\n",
       "      <td>24.0</td>\n",
       "      <td>0.804560</td>\n",
       "      <td>0.656118</td>\n",
       "      <td>3.421427</td>\n",
       "      <td>0.868471</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1104</th>\n",
       "      <td>3</td>\n",
       "      <td>0.793352</td>\n",
       "      <td>0.722357</td>\n",
       "      <td>3.0</td>\n",
       "      <td>48.0</td>\n",
       "      <td>0.799237</td>\n",
       "      <td>0.634262</td>\n",
       "      <td>3.780687</td>\n",
       "      <td>0.899428</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1098</th>\n",
       "      <td>2</td>\n",
       "      <td>0.788633</td>\n",
       "      <td>0.751920</td>\n",
       "      <td>3.0</td>\n",
       "      <td>24.0</td>\n",
       "      <td>0.773998</td>\n",
       "      <td>0.642954</td>\n",
       "      <td>1.715472</td>\n",
       "      <td>0.804969</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1074</th>\n",
       "      <td>2</td>\n",
       "      <td>0.781137</td>\n",
       "      <td>0.799885</td>\n",
       "      <td>1.0</td>\n",
       "      <td>24.0</td>\n",
       "      <td>0.774574</td>\n",
       "      <td>0.706245</td>\n",
       "      <td>1.152475</td>\n",
       "      <td>0.625655</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1067</th>\n",
       "      <td>3</td>\n",
       "      <td>0.755472</td>\n",
       "      <td>0.750769</td>\n",
       "      <td>1.0</td>\n",
       "      <td>12.0</td>\n",
       "      <td>0.758162</td>\n",
       "      <td>0.651392</td>\n",
       "      <td>2.215889</td>\n",
       "      <td>0.698365</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1065</th>\n",
       "      <td>12</td>\n",
       "      <td>0.749205</td>\n",
       "      <td>0.650831</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.758650</td>\n",
       "      <td>0.545992</td>\n",
       "      <td>10.894768</td>\n",
       "      <td>0.933561</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1064</th>\n",
       "      <td>10</td>\n",
       "      <td>0.747704</td>\n",
       "      <td>0.648638</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.757496</td>\n",
       "      <td>0.543882</td>\n",
       "      <td>9.778554</td>\n",
       "      <td>0.927846</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1063</th>\n",
       "      <td>8</td>\n",
       "      <td>0.746547</td>\n",
       "      <td>0.646967</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.756609</td>\n",
       "      <td>0.542278</td>\n",
       "      <td>8.735577</td>\n",
       "      <td>0.929513</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1062</th>\n",
       "      <td>6</td>\n",
       "      <td>0.744889</td>\n",
       "      <td>0.644808</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.755323</td>\n",
       "      <td>0.540169</td>\n",
       "      <td>7.690903</td>\n",
       "      <td>0.925067</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1060</th>\n",
       "      <td>4</td>\n",
       "      <td>0.743693</td>\n",
       "      <td>0.643752</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.754391</td>\n",
       "      <td>0.539072</td>\n",
       "      <td>6.562054</td>\n",
       "      <td>0.920384</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1061</th>\n",
       "      <td>5</td>\n",
       "      <td>0.721670</td>\n",
       "      <td>0.611326</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.737979</td>\n",
       "      <td>0.508861</td>\n",
       "      <td>4.381407</td>\n",
       "      <td>0.880060</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1084</th>\n",
       "      <td>2</td>\n",
       "      <td>0.680172</td>\n",
       "      <td>0.806922</td>\n",
       "      <td>1.0</td>\n",
       "      <td>122.0</td>\n",
       "      <td>0.623935</td>\n",
       "      <td>0.681772</td>\n",
       "      <td>1.334918</td>\n",
       "      <td>0.629147</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1091</th>\n",
       "      <td>3</td>\n",
       "      <td>0.670975</td>\n",
       "      <td>0.861293</td>\n",
       "      <td>3.0</td>\n",
       "      <td>12.0</td>\n",
       "      <td>0.507630</td>\n",
       "      <td>0.757468</td>\n",
       "      <td>3.625248</td>\n",
       "      <td>0.346563</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1059</th>\n",
       "      <td>3</td>\n",
       "      <td>0.664485</td>\n",
       "      <td>0.535194</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.690694</td>\n",
       "      <td>0.429367</td>\n",
       "      <td>2.229519</td>\n",
       "      <td>0.774488</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>...</th>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1008</th>\n",
       "      <td>2</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.752528</td>\n",
       "      <td>0.571561</td>\n",
       "      <td>1.565016</td>\n",
       "      <td>0.789332</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1009</th>\n",
       "      <td>3</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.786329</td>\n",
       "      <td>0.604641</td>\n",
       "      <td>3.070189</td>\n",
       "      <td>0.823940</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1010</th>\n",
       "      <td>4</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.797773</td>\n",
       "      <td>0.623797</td>\n",
       "      <td>4.573471</td>\n",
       "      <td>0.854342</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1011</th>\n",
       "      <td>5</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.811036</td>\n",
       "      <td>0.647426</td>\n",
       "      <td>6.084401</td>\n",
       "      <td>0.872678</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1012</th>\n",
       "      <td>6</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.818045</td>\n",
       "      <td>0.658903</td>\n",
       "      <td>7.585594</td>\n",
       "      <td>0.891729</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1013</th>\n",
       "      <td>7</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.822968</td>\n",
       "      <td>0.668017</td>\n",
       "      <td>9.087819</td>\n",
       "      <td>0.897127</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1014</th>\n",
       "      <td>2</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.784732</td>\n",
       "      <td>0.657468</td>\n",
       "      <td>2.059196</td>\n",
       "      <td>0.781711</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1015</th>\n",
       "      <td>3</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.798527</td>\n",
       "      <td>0.645232</td>\n",
       "      <td>4.067227</td>\n",
       "      <td>0.853469</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1016</th>\n",
       "      <td>4</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.809617</td>\n",
       "      <td>0.656962</td>\n",
       "      <td>6.087417</td>\n",
       "      <td>0.905461</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1017</th>\n",
       "      <td>5</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.812012</td>\n",
       "      <td>0.654852</td>\n",
       "      <td>8.131453</td>\n",
       "      <td>0.922369</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1018</th>\n",
       "      <td>6</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.812766</td>\n",
       "      <td>0.652996</td>\n",
       "      <td>10.171448</td>\n",
       "      <td>0.939832</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1019</th>\n",
       "      <td>7</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.814452</td>\n",
       "      <td>0.654008</td>\n",
       "      <td>12.186608</td>\n",
       "      <td>0.944436</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1020</th>\n",
       "      <td>2</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.430758</td>\n",
       "      <td>0.181603</td>\n",
       "      <td>1.472629</td>\n",
       "      <td>0.539054</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1021</th>\n",
       "      <td>2</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.430758</td>\n",
       "      <td>0.181603</td>\n",
       "      <td>1.650742</td>\n",
       "      <td>0.533736</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1022</th>\n",
       "      <td>9</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.661817</td>\n",
       "      <td>0.377722</td>\n",
       "      <td>32.087380</td>\n",
       "      <td>0.857358</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1023</th>\n",
       "      <td>10</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.495963</td>\n",
       "      <td>0.224135</td>\n",
       "      <td>19.576402</td>\n",
       "      <td>0.709001</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1024</th>\n",
       "      <td>11</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.471079</td>\n",
       "      <td>0.183966</td>\n",
       "      <td>13.169223</td>\n",
       "      <td>0.641372</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1025</th>\n",
       "      <td>4</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.476269</td>\n",
       "      <td>0.192152</td>\n",
       "      <td>14.774089</td>\n",
       "      <td>0.655818</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1026</th>\n",
       "      <td>6</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.659466</td>\n",
       "      <td>0.373502</td>\n",
       "      <td>30.513274</td>\n",
       "      <td>0.856088</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1027</th>\n",
       "      <td>8</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.490463</td>\n",
       "      <td>0.215105</td>\n",
       "      <td>17.982180</td>\n",
       "      <td>0.693443</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1028</th>\n",
       "      <td>12</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.665410</td>\n",
       "      <td>0.383713</td>\n",
       "      <td>33.647811</td>\n",
       "      <td>0.860057</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1029</th>\n",
       "      <td>14</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.587473</td>\n",
       "      <td>0.242110</td>\n",
       "      <td>22.724931</td>\n",
       "      <td>0.752104</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1030</th>\n",
       "      <td>16</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.596744</td>\n",
       "      <td>0.258312</td>\n",
       "      <td>24.286118</td>\n",
       "      <td>0.783299</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1031</th>\n",
       "      <td>18</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.668027</td>\n",
       "      <td>0.387511</td>\n",
       "      <td>36.816779</td>\n",
       "      <td>0.864423</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1032</th>\n",
       "      <td>20</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.629791</td>\n",
       "      <td>0.318481</td>\n",
       "      <td>27.448016</td>\n",
       "      <td>0.832910</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1033</th>\n",
       "      <td>22</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.656361</td>\n",
       "      <td>0.367679</td>\n",
       "      <td>28.963678</td>\n",
       "      <td>0.859422</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1034</th>\n",
       "      <td>15</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.666918</td>\n",
       "      <td>0.385907</td>\n",
       "      <td>35.223553</td>\n",
       "      <td>0.865534</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1035</th>\n",
       "      <td>21</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.669003</td>\n",
       "      <td>0.389114</td>\n",
       "      <td>38.440099</td>\n",
       "      <td>0.866010</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1036</th>\n",
       "      <td>24</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.669846</td>\n",
       "      <td>0.390464</td>\n",
       "      <td>40.048072</td>\n",
       "      <td>0.866090</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1037</th>\n",
       "      <td>27</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.670866</td>\n",
       "      <td>0.392152</td>\n",
       "      <td>41.646417</td>\n",
       "      <td>0.866884</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1038</th>\n",
       "      <td>30</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.671753</td>\n",
       "      <td>0.393840</td>\n",
       "      <td>43.278384</td>\n",
       "      <td>0.871249</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1039</th>\n",
       "      <td>33</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.673039</td>\n",
       "      <td>0.396034</td>\n",
       "      <td>44.906827</td>\n",
       "      <td>0.865296</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1040</th>\n",
       "      <td>2</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.513529</td>\n",
       "      <td>0.532152</td>\n",
       "      <td>1.949590</td>\n",
       "      <td>0.510398</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1041</th>\n",
       "      <td>3</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.645626</td>\n",
       "      <td>0.551392</td>\n",
       "      <td>3.848985</td>\n",
       "      <td>0.652246</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1042</th>\n",
       "      <td>4</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.868879</td>\n",
       "      <td>0.761688</td>\n",
       "      <td>20.928807</td>\n",
       "      <td>0.909589</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1043</th>\n",
       "      <td>5</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.786018</td>\n",
       "      <td>0.629958</td>\n",
       "      <td>7.647516</td>\n",
       "      <td>0.830846</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1044</th>\n",
       "      <td>6</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.869721</td>\n",
       "      <td>0.763122</td>\n",
       "      <td>22.827152</td>\n",
       "      <td>0.920464</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1045</th>\n",
       "      <td>7</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.819198</td>\n",
       "      <td>0.671983</td>\n",
       "      <td>11.425825</td>\n",
       "      <td>0.876806</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1046</th>\n",
       "      <td>8</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.827227</td>\n",
       "      <td>0.682025</td>\n",
       "      <td>13.333037</td>\n",
       "      <td>0.878949</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1047</th>\n",
       "      <td>9</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.836719</td>\n",
       "      <td>0.699578</td>\n",
       "      <td>15.241036</td>\n",
       "      <td>0.888792</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1048</th>\n",
       "      <td>10</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.869899</td>\n",
       "      <td>0.763207</td>\n",
       "      <td>26.090022</td>\n",
       "      <td>0.922289</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1049</th>\n",
       "      <td>11</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.867770</td>\n",
       "      <td>0.759241</td>\n",
       "      <td>19.046752</td>\n",
       "      <td>0.902524</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1050</th>\n",
       "      <td>2</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.610273</td>\n",
       "      <td>0.458819</td>\n",
       "      <td>3.025803</td>\n",
       "      <td>0.795126</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1051</th>\n",
       "      <td>3</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.801544</td>\n",
       "      <td>0.634768</td>\n",
       "      <td>5.939004</td>\n",
       "      <td>0.913875</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1052</th>\n",
       "      <td>4</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.818754</td>\n",
       "      <td>0.663460</td>\n",
       "      <td>8.846303</td>\n",
       "      <td>0.930862</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1053</th>\n",
       "      <td>5</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.826872</td>\n",
       "      <td>0.676456</td>\n",
       "      <td>11.801530</td>\n",
       "      <td>0.935069</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1054</th>\n",
       "      <td>6</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.833171</td>\n",
       "      <td>0.688101</td>\n",
       "      <td>14.741884</td>\n",
       "      <td>0.941737</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1055</th>\n",
       "      <td>7</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.847276</td>\n",
       "      <td>0.714262</td>\n",
       "      <td>17.673385</td>\n",
       "      <td>0.942531</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1056</th>\n",
       "      <td>8</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.853220</td>\n",
       "      <td>0.725148</td>\n",
       "      <td>20.603261</td>\n",
       "      <td>0.945388</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1057</th>\n",
       "      <td>9</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.858410</td>\n",
       "      <td>0.734599</td>\n",
       "      <td>23.537852</td>\n",
       "      <td>0.946896</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>1113 rows × 9 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "      epoch  f1_score  f1_score_20  hidden_layers  no_of_features  test_score  \\\n",
       "1110     12  0.873902     0.834989            3.0            48.0    0.866971   \n",
       "1109     10  0.873501     0.834416            3.0            48.0    0.866616   \n",
       "1108      8  0.873269     0.834071            3.0            48.0    0.866439   \n",
       "1107      6  0.872948     0.833669            3.0            48.0    0.866128   \n",
       "1076      4  0.872752     0.831559            1.0            24.0    0.867237   \n",
       "1078      6  0.872539     0.831315            1.0            24.0    0.867060   \n",
       "1112      3  0.868402     0.841002            3.0           122.0    0.851579   \n",
       "1086      4  0.862763     0.823989            1.0           122.0    0.852378   \n",
       "1077      5  0.862754     0.818941            1.0            24.0    0.858011   \n",
       "1088      6  0.862359     0.824165            1.0           122.0    0.851845   \n",
       "1111      2  0.861654     0.870263            3.0           122.0    0.828602   \n",
       "1087      5  0.858542     0.827095            1.0           122.0    0.846256   \n",
       "1106      5  0.858122     0.812448            3.0            48.0    0.853132   \n",
       "1097     12  0.857121     0.839242            3.0            12.0    0.830509   \n",
       "1096     10  0.856716     0.838982            3.0            12.0    0.830021   \n",
       "1095      8  0.856138     0.838197            3.0            12.0    0.829400   \n",
       "1094      6  0.855882     0.837944            3.0            12.0    0.829045   \n",
       "1092      4  0.855637     0.837724            3.0            12.0    0.828691   \n",
       "1083      6  0.854733     0.806544            1.0            48.0    0.847232   \n",
       "1082      5  0.853477     0.805835            1.0            48.0    0.845990   \n",
       "1080      3  0.852312     0.811743            1.0            48.0    0.842708   \n",
       "1081      4  0.852032     0.805783            1.0            48.0    0.844438   \n",
       "1105      4  0.842595     0.791095            3.0            48.0    0.839558   \n",
       "1093      5  0.841468     0.838503            3.0            12.0    0.808242   \n",
       "1073     12  0.837183     0.785078            1.0            12.0    0.833348   \n",
       "1072     10  0.836167     0.783675            1.0            12.0    0.832417   \n",
       "1071      8  0.835452     0.782708            1.0            12.0    0.831751   \n",
       "1085      3  0.835378     0.838909            1.0           122.0    0.802431   \n",
       "1070      6  0.834512     0.781426            1.0            12.0    0.830864   \n",
       "1068      4  0.833782     0.780383            1.0            12.0    0.830199   \n",
       "1100      4  0.826648     0.765712            3.0            24.0    0.821904   \n",
       "1102      6  0.825365     0.763848            3.0            24.0    0.820795   \n",
       "1101      5  0.825287     0.765437            3.0            24.0    0.820529   \n",
       "1069      5  0.822696     0.766715            1.0            12.0    0.819109   \n",
       "1075      3  0.822057     0.800522            1.0            24.0    0.820218   \n",
       "1079      2  0.820515     0.810775            1.0            48.0    0.796797   \n",
       "1099      3  0.811693     0.758604            3.0            24.0    0.804560   \n",
       "1104      3  0.793352     0.722357            3.0            48.0    0.799237   \n",
       "1098      2  0.788633     0.751920            3.0            24.0    0.773998   \n",
       "1074      2  0.781137     0.799885            1.0            24.0    0.774574   \n",
       "1067      3  0.755472     0.750769            1.0            12.0    0.758162   \n",
       "1065     12  0.749205     0.650831            1.0             1.0    0.758650   \n",
       "1064     10  0.747704     0.648638            1.0             1.0    0.757496   \n",
       "1063      8  0.746547     0.646967            1.0             1.0    0.756609   \n",
       "1062      6  0.744889     0.644808            1.0             1.0    0.755323   \n",
       "1060      4  0.743693     0.643752            1.0             1.0    0.754391   \n",
       "1061      5  0.721670     0.611326            1.0             1.0    0.737979   \n",
       "1084      2  0.680172     0.806922            1.0           122.0    0.623935   \n",
       "1091      3  0.670975     0.861293            3.0            12.0    0.507630   \n",
       "1059      3  0.664485     0.535194            1.0             1.0    0.690694   \n",
       "...     ...       ...          ...            ...             ...         ...   \n",
       "1008      2       NaN          NaN            NaN             NaN    0.752528   \n",
       "1009      3       NaN          NaN            NaN             NaN    0.786329   \n",
       "1010      4       NaN          NaN            NaN             NaN    0.797773   \n",
       "1011      5       NaN          NaN            NaN             NaN    0.811036   \n",
       "1012      6       NaN          NaN            NaN             NaN    0.818045   \n",
       "1013      7       NaN          NaN            NaN             NaN    0.822968   \n",
       "1014      2       NaN          NaN            NaN             NaN    0.784732   \n",
       "1015      3       NaN          NaN            NaN             NaN    0.798527   \n",
       "1016      4       NaN          NaN            NaN             NaN    0.809617   \n",
       "1017      5       NaN          NaN            NaN             NaN    0.812012   \n",
       "1018      6       NaN          NaN            NaN             NaN    0.812766   \n",
       "1019      7       NaN          NaN            NaN             NaN    0.814452   \n",
       "1020      2       NaN          NaN            NaN             NaN    0.430758   \n",
       "1021      2       NaN          NaN            NaN             NaN    0.430758   \n",
       "1022      9       NaN          NaN            NaN             NaN    0.661817   \n",
       "1023     10       NaN          NaN            NaN             NaN    0.495963   \n",
       "1024     11       NaN          NaN            NaN             NaN    0.471079   \n",
       "1025      4       NaN          NaN            NaN             NaN    0.476269   \n",
       "1026      6       NaN          NaN            NaN             NaN    0.659466   \n",
       "1027      8       NaN          NaN            NaN             NaN    0.490463   \n",
       "1028     12       NaN          NaN            NaN             NaN    0.665410   \n",
       "1029     14       NaN          NaN            NaN             NaN    0.587473   \n",
       "1030     16       NaN          NaN            NaN             NaN    0.596744   \n",
       "1031     18       NaN          NaN            NaN             NaN    0.668027   \n",
       "1032     20       NaN          NaN            NaN             NaN    0.629791   \n",
       "1033     22       NaN          NaN            NaN             NaN    0.656361   \n",
       "1034     15       NaN          NaN            NaN             NaN    0.666918   \n",
       "1035     21       NaN          NaN            NaN             NaN    0.669003   \n",
       "1036     24       NaN          NaN            NaN             NaN    0.669846   \n",
       "1037     27       NaN          NaN            NaN             NaN    0.670866   \n",
       "1038     30       NaN          NaN            NaN             NaN    0.671753   \n",
       "1039     33       NaN          NaN            NaN             NaN    0.673039   \n",
       "1040      2       NaN          NaN            NaN             NaN    0.513529   \n",
       "1041      3       NaN          NaN            NaN             NaN    0.645626   \n",
       "1042      4       NaN          NaN            NaN             NaN    0.868879   \n",
       "1043      5       NaN          NaN            NaN             NaN    0.786018   \n",
       "1044      6       NaN          NaN            NaN             NaN    0.869721   \n",
       "1045      7       NaN          NaN            NaN             NaN    0.819198   \n",
       "1046      8       NaN          NaN            NaN             NaN    0.827227   \n",
       "1047      9       NaN          NaN            NaN             NaN    0.836719   \n",
       "1048     10       NaN          NaN            NaN             NaN    0.869899   \n",
       "1049     11       NaN          NaN            NaN             NaN    0.867770   \n",
       "1050      2       NaN          NaN            NaN             NaN    0.610273   \n",
       "1051      3       NaN          NaN            NaN             NaN    0.801544   \n",
       "1052      4       NaN          NaN            NaN             NaN    0.818754   \n",
       "1053      5       NaN          NaN            NaN             NaN    0.826872   \n",
       "1054      6       NaN          NaN            NaN             NaN    0.833171   \n",
       "1055      7       NaN          NaN            NaN             NaN    0.847276   \n",
       "1056      8       NaN          NaN            NaN             NaN    0.853220   \n",
       "1057      9       NaN          NaN            NaN             NaN    0.858410   \n",
       "\n",
       "      test_score_20  time_taken  train_score  \n",
       "1110       0.757553   18.979939     0.924750  \n",
       "1109       0.756878   17.183680     0.923797  \n",
       "1108       0.756540   15.182953     0.926258  \n",
       "1107       0.756034   13.206024     0.926258  \n",
       "1076       0.755696    6.615212     0.907049  \n",
       "1078       0.755443    5.527226     0.901810  \n",
       "1112       0.747764    4.354387     0.860137  \n",
       "1086       0.733755    7.787664     0.916574  \n",
       "1077       0.740253    4.420664     0.892046  \n",
       "1088       0.733755    6.520284     0.919114  \n",
       "1111       0.776118    2.195020     0.746150  \n",
       "1087       0.735696    5.203199     0.901572  \n",
       "1106       0.731477    7.784258     0.925226  \n",
       "1097       0.730211   16.995091     0.801873  \n",
       "1096       0.729789   15.335974     0.797508  \n",
       "1095       0.728608   13.734848     0.801000  \n",
       "1094       0.728101   12.027643     0.797746  \n",
       "1092       0.727595   10.303682     0.794491  \n",
       "1083       0.715612    5.875551     0.913796  \n",
       "1082       0.714684    4.594229     0.909192  \n",
       "1080       0.719156    2.295216     0.848389  \n",
       "1081       0.714346    3.419154     0.890776  \n",
       "1105       0.707004    5.737464     0.926179  \n",
       "1093       0.728017    6.960655     0.745753  \n",
       "1073       0.696624   10.911904     0.897841  \n",
       "1072       0.694937    9.859136     0.896174  \n",
       "1071       0.693755    8.743644     0.888236  \n",
       "1085       0.730970    2.609813     0.753929  \n",
       "1070       0.692152    7.700856     0.886411  \n",
       "1068       0.690886    6.587520     0.885537  \n",
       "1100       0.667173   10.274888     0.910462  \n",
       "1102       0.665063    8.518722     0.915066  \n",
       "1101       0.666920    6.885301     0.907287  \n",
       "1069       0.672574    4.404348     0.862121  \n",
       "1075       0.716118    2.220499     0.740753  \n",
       "1079       0.707173    1.140302     0.754961  \n",
       "1099       0.656118    3.421427     0.868471  \n",
       "1104       0.634262    3.780687     0.899428  \n",
       "1098       0.642954    1.715472     0.804969  \n",
       "1074       0.706245    1.152475     0.625655  \n",
       "1067       0.651392    2.215889     0.698365  \n",
       "1065       0.545992   10.894768     0.933561  \n",
       "1064       0.543882    9.778554     0.927846  \n",
       "1063       0.542278    8.735577     0.929513  \n",
       "1062       0.540169    7.690903     0.925067  \n",
       "1060       0.539072    6.562054     0.920384  \n",
       "1061       0.508861    4.381407     0.880060  \n",
       "1084       0.681772    1.334918     0.629147  \n",
       "1091       0.757468    3.625248     0.346563  \n",
       "1059       0.429367    2.229519     0.774488  \n",
       "...             ...         ...          ...  \n",
       "1008       0.571561    1.565016     0.789332  \n",
       "1009       0.604641    3.070189     0.823940  \n",
       "1010       0.623797    4.573471     0.854342  \n",
       "1011       0.647426    6.084401     0.872678  \n",
       "1012       0.658903    7.585594     0.891729  \n",
       "1013       0.668017    9.087819     0.897127  \n",
       "1014       0.657468    2.059196     0.781711  \n",
       "1015       0.645232    4.067227     0.853469  \n",
       "1016       0.656962    6.087417     0.905461  \n",
       "1017       0.654852    8.131453     0.922369  \n",
       "1018       0.652996   10.171448     0.939832  \n",
       "1019       0.654008   12.186608     0.944436  \n",
       "1020       0.181603    1.472629     0.539054  \n",
       "1021       0.181603    1.650742     0.533736  \n",
       "1022       0.377722   32.087380     0.857358  \n",
       "1023       0.224135   19.576402     0.709001  \n",
       "1024       0.183966   13.169223     0.641372  \n",
       "1025       0.192152   14.774089     0.655818  \n",
       "1026       0.373502   30.513274     0.856088  \n",
       "1027       0.215105   17.982180     0.693443  \n",
       "1028       0.383713   33.647811     0.860057  \n",
       "1029       0.242110   22.724931     0.752104  \n",
       "1030       0.258312   24.286118     0.783299  \n",
       "1031       0.387511   36.816779     0.864423  \n",
       "1032       0.318481   27.448016     0.832910  \n",
       "1033       0.367679   28.963678     0.859422  \n",
       "1034       0.385907   35.223553     0.865534  \n",
       "1035       0.389114   38.440099     0.866010  \n",
       "1036       0.390464   40.048072     0.866090  \n",
       "1037       0.392152   41.646417     0.866884  \n",
       "1038       0.393840   43.278384     0.871249  \n",
       "1039       0.396034   44.906827     0.865296  \n",
       "1040       0.532152    1.949590     0.510398  \n",
       "1041       0.551392    3.848985     0.652246  \n",
       "1042       0.761688   20.928807     0.909589  \n",
       "1043       0.629958    7.647516     0.830846  \n",
       "1044       0.763122   22.827152     0.920464  \n",
       "1045       0.671983   11.425825     0.876806  \n",
       "1046       0.682025   13.333037     0.878949  \n",
       "1047       0.699578   15.241036     0.888792  \n",
       "1048       0.763207   26.090022     0.922289  \n",
       "1049       0.759241   19.046752     0.902524  \n",
       "1050       0.458819    3.025803     0.795126  \n",
       "1051       0.634768    5.939004     0.913875  \n",
       "1052       0.663460    8.846303     0.930862  \n",
       "1053       0.676456   11.801530     0.935069  \n",
       "1054       0.688101   14.741884     0.941737  \n",
       "1055       0.714262   17.673385     0.942531  \n",
       "1056       0.725148   20.603261     0.945388  \n",
       "1057       0.734599   23.537852     0.946896  \n",
       "\n",
       "[1113 rows x 9 columns]"
      ]
     },
     "execution_count": 20,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "past_scores.sort_values(by='f1_score',ascending=False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-24T00:01:49.032870Z",
     "start_time": "2017-07-24T00:01:49.011786Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th>epoch</th>\n",
       "      <th>f1_score</th>\n",
       "      <th>f1_score_20</th>\n",
       "      <th>test_score</th>\n",
       "      <th>test_score_20</th>\n",
       "      <th>time_taken</th>\n",
       "      <th>train_score</th>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>no_of_features</th>\n",
       "      <th>hidden_layers</th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>24.0</th>\n",
       "      <th>1.0</th>\n",
       "      <td>4</td>\n",
       "      <td>0.872752</td>\n",
       "      <td>0.831559</td>\n",
       "      <td>0.867237</td>\n",
       "      <td>0.755696</td>\n",
       "      <td>6.615212</td>\n",
       "      <td>0.907049</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>48.0</th>\n",
       "      <th>3.0</th>\n",
       "      <td>12</td>\n",
       "      <td>0.873902</td>\n",
       "      <td>0.834989</td>\n",
       "      <td>0.866971</td>\n",
       "      <td>0.757553</td>\n",
       "      <td>18.979939</td>\n",
       "      <td>0.924750</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th rowspan=\"2\" valign=\"top\">122.0</th>\n",
       "      <th>1.0</th>\n",
       "      <td>4</td>\n",
       "      <td>0.862763</td>\n",
       "      <td>0.823989</td>\n",
       "      <td>0.852378</td>\n",
       "      <td>0.733755</td>\n",
       "      <td>7.787664</td>\n",
       "      <td>0.916574</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3.0</th>\n",
       "      <td>3</td>\n",
       "      <td>0.868402</td>\n",
       "      <td>0.841002</td>\n",
       "      <td>0.851579</td>\n",
       "      <td>0.747764</td>\n",
       "      <td>4.354387</td>\n",
       "      <td>0.860137</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>48.0</th>\n",
       "      <th>1.0</th>\n",
       "      <td>6</td>\n",
       "      <td>0.854733</td>\n",
       "      <td>0.806544</td>\n",
       "      <td>0.847232</td>\n",
       "      <td>0.715612</td>\n",
       "      <td>5.875551</td>\n",
       "      <td>0.913796</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th rowspan=\"2\" valign=\"top\">12.0</th>\n",
       "      <th>1.0</th>\n",
       "      <td>12</td>\n",
       "      <td>0.837183</td>\n",
       "      <td>0.785078</td>\n",
       "      <td>0.833348</td>\n",
       "      <td>0.696624</td>\n",
       "      <td>10.911904</td>\n",
       "      <td>0.897841</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3.0</th>\n",
       "      <td>12</td>\n",
       "      <td>0.857121</td>\n",
       "      <td>0.839242</td>\n",
       "      <td>0.830509</td>\n",
       "      <td>0.730211</td>\n",
       "      <td>16.995091</td>\n",
       "      <td>0.801873</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>24.0</th>\n",
       "      <th>3.0</th>\n",
       "      <td>4</td>\n",
       "      <td>0.826648</td>\n",
       "      <td>0.765712</td>\n",
       "      <td>0.821904</td>\n",
       "      <td>0.667173</td>\n",
       "      <td>10.274888</td>\n",
       "      <td>0.910462</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th rowspan=\"2\" valign=\"top\">1.0</th>\n",
       "      <th>1.0</th>\n",
       "      <td>12</td>\n",
       "      <td>0.749205</td>\n",
       "      <td>0.650831</td>\n",
       "      <td>0.758650</td>\n",
       "      <td>0.545992</td>\n",
       "      <td>10.894768</td>\n",
       "      <td>0.933561</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3.0</th>\n",
       "      <td>2</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.430758</td>\n",
       "      <td>0.181603</td>\n",
       "      <td>1.590742</td>\n",
       "      <td>0.529925</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "                              epoch  f1_score  f1_score_20  test_score  \\\n",
       "no_of_features hidden_layers                                             \n",
       "24.0           1.0                4  0.872752     0.831559    0.867237   \n",
       "48.0           3.0               12  0.873902     0.834989    0.866971   \n",
       "122.0          1.0                4  0.862763     0.823989    0.852378   \n",
       "               3.0                3  0.868402     0.841002    0.851579   \n",
       "48.0           1.0                6  0.854733     0.806544    0.847232   \n",
       "12.0           1.0               12  0.837183     0.785078    0.833348   \n",
       "               3.0               12  0.857121     0.839242    0.830509   \n",
       "24.0           3.0                4  0.826648     0.765712    0.821904   \n",
       "1.0            1.0               12  0.749205     0.650831    0.758650   \n",
       "               3.0                2  0.000000     0.000000    0.430758   \n",
       "\n",
       "                              test_score_20  time_taken  train_score  \n",
       "no_of_features hidden_layers                                          \n",
       "24.0           1.0                 0.755696    6.615212     0.907049  \n",
       "48.0           3.0                 0.757553   18.979939     0.924750  \n",
       "122.0          1.0                 0.733755    7.787664     0.916574  \n",
       "               3.0                 0.747764    4.354387     0.860137  \n",
       "48.0           1.0                 0.715612    5.875551     0.913796  \n",
       "12.0           1.0                 0.696624   10.911904     0.897841  \n",
       "               3.0                 0.730211   16.995091     0.801873  \n",
       "24.0           3.0                 0.667173   10.274888     0.910462  \n",
       "1.0            1.0                 0.545992   10.894768     0.933561  \n",
       "               3.0                 0.181603    1.590742     0.529925  "
      ]
     },
     "execution_count": 21,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "psg = past_scores.sort_values(by='test_score', ascending=False).groupby(by=['no_of_features', 'hidden_layers'])\n",
    "psg.first().sort_values(by='test_score', ascending=False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-24T00:01:49.052619Z",
     "start_time": "2017-07-24T00:01:49.034209Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th>epoch</th>\n",
       "      <th>f1_score</th>\n",
       "      <th>f1_score_20</th>\n",
       "      <th>test_score</th>\n",
       "      <th>test_score_20</th>\n",
       "      <th>time_taken</th>\n",
       "      <th>train_score</th>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>no_of_features</th>\n",
       "      <th>hidden_layers</th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>122.0</th>\n",
       "      <th>3.0</th>\n",
       "      <td>2.50</td>\n",
       "      <td>0.865028</td>\n",
       "      <td>0.855632</td>\n",
       "      <td>0.840090</td>\n",
       "      <td>0.761941</td>\n",
       "      <td>3.274703</td>\n",
       "      <td>0.803143</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>24.0</th>\n",
       "      <th>1.0</th>\n",
       "      <td>4.00</td>\n",
       "      <td>0.842248</td>\n",
       "      <td>0.816444</td>\n",
       "      <td>0.837420</td>\n",
       "      <td>0.734751</td>\n",
       "      <td>3.987215</td>\n",
       "      <td>0.813462</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th rowspan=\"2\" valign=\"top\">48.0</th>\n",
       "      <th>1.0</th>\n",
       "      <td>4.00</td>\n",
       "      <td>0.846614</td>\n",
       "      <td>0.808136</td>\n",
       "      <td>0.835433</td>\n",
       "      <td>0.714194</td>\n",
       "      <td>3.464890</td>\n",
       "      <td>0.863423</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3.0</th>\n",
       "      <td>6.25</td>\n",
       "      <td>0.818815</td>\n",
       "      <td>0.777829</td>\n",
       "      <td>0.825375</td>\n",
       "      <td>0.698460</td>\n",
       "      <td>10.472351</td>\n",
       "      <td>0.908398</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>24.0</th>\n",
       "      <th>3.0</th>\n",
       "      <td>4.00</td>\n",
       "      <td>0.815525</td>\n",
       "      <td>0.761104</td>\n",
       "      <td>0.808357</td>\n",
       "      <td>0.659646</td>\n",
       "      <td>6.163162</td>\n",
       "      <td>0.881251</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>122.0</th>\n",
       "      <th>1.0</th>\n",
       "      <td>4.00</td>\n",
       "      <td>0.819843</td>\n",
       "      <td>0.824216</td>\n",
       "      <td>0.795369</td>\n",
       "      <td>0.723190</td>\n",
       "      <td>4.691176</td>\n",
       "      <td>0.824067</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th rowspan=\"2\" valign=\"top\">12.0</th>\n",
       "      <th>1.0</th>\n",
       "      <td>6.25</td>\n",
       "      <td>0.800505</td>\n",
       "      <td>0.772256</td>\n",
       "      <td>0.791918</td>\n",
       "      <td>0.679694</td>\n",
       "      <td>6.445614</td>\n",
       "      <td>0.816836</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3.0</th>\n",
       "      <td>6.25</td>\n",
       "      <td>0.806903</td>\n",
       "      <td>0.843851</td>\n",
       "      <td>0.744910</td>\n",
       "      <td>0.735390</td>\n",
       "      <td>10.099620</td>\n",
       "      <td>0.673401</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th rowspan=\"2\" valign=\"top\">1.0</th>\n",
       "      <th>1.0</th>\n",
       "      <td>6.25</td>\n",
       "      <td>0.709750</td>\n",
       "      <td>0.603261</td>\n",
       "      <td>0.727805</td>\n",
       "      <td>0.498439</td>\n",
       "      <td>6.430936</td>\n",
       "      <td>0.867102</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3.0</th>\n",
       "      <td>2.00</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.430758</td>\n",
       "      <td>0.181603</td>\n",
       "      <td>1.590742</td>\n",
       "      <td>0.529925</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "                              epoch  f1_score  f1_score_20  test_score  \\\n",
       "no_of_features hidden_layers                                             \n",
       "122.0          3.0             2.50  0.865028     0.855632    0.840090   \n",
       "24.0           1.0             4.00  0.842248     0.816444    0.837420   \n",
       "48.0           1.0             4.00  0.846614     0.808136    0.835433   \n",
       "               3.0             6.25  0.818815     0.777829    0.825375   \n",
       "24.0           3.0             4.00  0.815525     0.761104    0.808357   \n",
       "122.0          1.0             4.00  0.819843     0.824216    0.795369   \n",
       "12.0           1.0             6.25  0.800505     0.772256    0.791918   \n",
       "               3.0             6.25  0.806903     0.843851    0.744910   \n",
       "1.0            1.0             6.25  0.709750     0.603261    0.727805   \n",
       "               3.0             2.00  0.000000     0.000000    0.430758   \n",
       "\n",
       "                              test_score_20  time_taken  train_score  \n",
       "no_of_features hidden_layers                                          \n",
       "122.0          3.0                 0.761941    3.274703     0.803143  \n",
       "24.0           1.0                 0.734751    3.987215     0.813462  \n",
       "48.0           1.0                 0.714194    3.464890     0.863423  \n",
       "               3.0                 0.698460   10.472351     0.908398  \n",
       "24.0           3.0                 0.659646    6.163162     0.881251  \n",
       "122.0          1.0                 0.723190    4.691176     0.824067  \n",
       "12.0           1.0                 0.679694    6.445614     0.816836  \n",
       "               3.0                 0.735390   10.099620     0.673401  \n",
       "1.0            1.0                 0.498439    6.430936     0.867102  \n",
       "               3.0                 0.181603    1.590742     0.529925  "
      ]
     },
     "execution_count": 22,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "psg.mean().sort_values(by='test_score', ascending=False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-24T00:01:49.107383Z",
     "start_time": "2017-07-24T00:01:49.053943Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "# Attach the per-sample prediction tables saved by the training run to the\n",
     "# Train namespace (presumably columns Actual/Attack_prob/Normal_prob/Prediction,\n",
     "# matching the display in the next cell -- confirm against the writer).\n",
     "# NOTE: read_pickle can execute arbitrary code; these must be trusted local artifacts.\n",
     "Train.predictions = pd.read_pickle(\"dataset/tf_dense_only_nsl_kdd_predictions.pkl\")\n",
     "Train.predictions_ = pd.read_pickle(\"dataset/tf_dense_only_nsl_kdd_predictions__.pkl\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-24T00:01:49.138491Z",
     "start_time": "2017-07-24T00:01:49.108913Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>Actual</th>\n",
       "      <th>Attack_prob</th>\n",
       "      <th>Normal_prob</th>\n",
       "      <th>Prediction</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.290404</td>\n",
       "      <td>0.709596</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.290311</td>\n",
       "      <td>0.709689</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.441519</td>\n",
       "      <td>0.558481</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.204695</td>\n",
       "      <td>0.795305</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.359508</td>\n",
       "      <td>0.640492</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>5</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.666080</td>\n",
       "      <td>0.333920</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>6</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.522911</td>\n",
       "      <td>0.477089</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>7</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.135548</td>\n",
       "      <td>0.864452</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>8</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.662445</td>\n",
       "      <td>0.337555</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.586753</td>\n",
       "      <td>0.413247</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>10</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.398439</td>\n",
       "      <td>0.601561</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.518839</td>\n",
       "      <td>0.481161</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>12</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.285653</td>\n",
       "      <td>0.714347</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>13</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.389742</td>\n",
       "      <td>0.610258</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>14</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.397625</td>\n",
       "      <td>0.602374</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>15</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.678045</td>\n",
       "      <td>0.321955</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>16</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.658028</td>\n",
       "      <td>0.341972</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>17</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.675036</td>\n",
       "      <td>0.324964</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>18</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.334857</td>\n",
       "      <td>0.665143</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>19</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.288760</td>\n",
       "      <td>0.711240</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>20</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.257760</td>\n",
       "      <td>0.742240</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>21</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.324181</td>\n",
       "      <td>0.675819</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.655833</td>\n",
       "      <td>0.344167</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>23</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.670421</td>\n",
       "      <td>0.329579</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>24</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.138075</td>\n",
       "      <td>0.861925</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>25</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.318148</td>\n",
       "      <td>0.681852</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>26</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.662715</td>\n",
       "      <td>0.337285</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>27</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.652813</td>\n",
       "      <td>0.347187</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>28</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.466128</td>\n",
       "      <td>0.533872</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>29</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.345170</td>\n",
       "      <td>0.654830</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>30</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.219251</td>\n",
       "      <td>0.780749</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>31</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.543380</td>\n",
       "      <td>0.456620</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>32</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.679098</td>\n",
       "      <td>0.320902</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>33</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.425952</td>\n",
       "      <td>0.574048</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>34</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.229939</td>\n",
       "      <td>0.770061</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>35</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.087901</td>\n",
       "      <td>0.912099</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>36</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.654126</td>\n",
       "      <td>0.345874</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>37</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.311646</td>\n",
       "      <td>0.688354</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>38</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.430014</td>\n",
       "      <td>0.569986</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>39</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.462587</td>\n",
       "      <td>0.537413</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>40</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.384066</td>\n",
       "      <td>0.615934</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>41</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.676055</td>\n",
       "      <td>0.323945</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>42</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.666674</td>\n",
       "      <td>0.333326</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>43</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.657220</td>\n",
       "      <td>0.342780</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>44</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.236042</td>\n",
       "      <td>0.763958</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>45</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.652876</td>\n",
       "      <td>0.347124</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>46</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.176722</td>\n",
       "      <td>0.823278</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>47</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.639299</td>\n",
       "      <td>0.360701</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>48</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.413357</td>\n",
       "      <td>0.586643</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>49</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.353809</td>\n",
       "      <td>0.646191</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>...</th>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22494</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.012420</td>\n",
       "      <td>0.987580</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22495</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.665835</td>\n",
       "      <td>0.334165</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22496</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.271595</td>\n",
       "      <td>0.728405</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22497</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.289419</td>\n",
       "      <td>0.710581</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22498</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.287932</td>\n",
       "      <td>0.712068</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22499</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.640072</td>\n",
       "      <td>0.359928</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22500</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.288896</td>\n",
       "      <td>0.711104</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22501</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.422658</td>\n",
       "      <td>0.577342</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22502</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.222248</td>\n",
       "      <td>0.777752</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22503</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.348491</td>\n",
       "      <td>0.651509</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22504</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.230068</td>\n",
       "      <td>0.769932</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22505</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.636328</td>\n",
       "      <td>0.363673</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22506</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.607841</td>\n",
       "      <td>0.392159</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22507</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.410168</td>\n",
       "      <td>0.589832</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22508</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.648628</td>\n",
       "      <td>0.351372</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22509</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.364853</td>\n",
       "      <td>0.635147</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22510</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.356348</td>\n",
       "      <td>0.643652</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22511</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.629731</td>\n",
       "      <td>0.370269</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22512</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.486851</td>\n",
       "      <td>0.513149</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22513</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.128828</td>\n",
       "      <td>0.871172</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22514</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.441461</td>\n",
       "      <td>0.558539</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22515</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.494665</td>\n",
       "      <td>0.505335</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22516</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.667365</td>\n",
       "      <td>0.332635</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22517</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.394626</td>\n",
       "      <td>0.605374</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22518</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.612999</td>\n",
       "      <td>0.387001</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22519</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.390297</td>\n",
       "      <td>0.609703</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22520</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.245386</td>\n",
       "      <td>0.754614</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22521</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.406724</td>\n",
       "      <td>0.593276</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22522</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.636279</td>\n",
       "      <td>0.363721</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22523</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.666132</td>\n",
       "      <td>0.333868</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22524</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.508207</td>\n",
       "      <td>0.491793</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22525</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.198062</td>\n",
       "      <td>0.801938</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22526</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.663925</td>\n",
       "      <td>0.336075</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22527</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.643164</td>\n",
       "      <td>0.356836</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22528</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.324625</td>\n",
       "      <td>0.675375</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22529</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.547383</td>\n",
       "      <td>0.452617</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22530</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.352351</td>\n",
       "      <td>0.647649</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22531</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.413894</td>\n",
       "      <td>0.586106</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22532</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.650537</td>\n",
       "      <td>0.349463</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22533</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.676151</td>\n",
       "      <td>0.323849</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22534</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.289614</td>\n",
       "      <td>0.710386</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22535</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.655622</td>\n",
       "      <td>0.344379</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22536</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.431428</td>\n",
       "      <td>0.568572</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22537</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.590046</td>\n",
       "      <td>0.409954</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22538</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.331959</td>\n",
       "      <td>0.668041</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22539</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.601730</td>\n",
       "      <td>0.398269</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22540</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.656616</td>\n",
       "      <td>0.343384</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22541</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.630674</td>\n",
       "      <td>0.369326</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22542</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.425283</td>\n",
       "      <td>0.574717</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22543</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.331764</td>\n",
       "      <td>0.668236</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>22544 rows × 4 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "       Actual  Attack_prob  Normal_prob  Prediction\n",
       "0         1.0     0.290404     0.709596         1.0\n",
       "1         1.0     0.290311     0.709689         1.0\n",
       "2         0.0     0.441519     0.558481         1.0\n",
       "3         1.0     0.204695     0.795305         1.0\n",
       "4         1.0     0.359508     0.640492         1.0\n",
       "5         0.0     0.666080     0.333920         0.0\n",
       "6         0.0     0.522911     0.477089         0.0\n",
       "7         1.0     0.135548     0.864452         1.0\n",
       "8         0.0     0.662445     0.337555         0.0\n",
       "9         1.0     0.586753     0.413247         0.0\n",
       "10        1.0     0.398439     0.601561         1.0\n",
       "11        0.0     0.518839     0.481161         0.0\n",
       "12        1.0     0.285653     0.714347         1.0\n",
       "13        1.0     0.389742     0.610258         1.0\n",
       "14        0.0     0.397625     0.602374         1.0\n",
       "15        0.0     0.678045     0.321955         0.0\n",
       "16        0.0     0.658028     0.341972         0.0\n",
       "17        0.0     0.675036     0.324964         0.0\n",
       "18        0.0     0.334857     0.665143         1.0\n",
       "19        1.0     0.288760     0.711240         1.0\n",
       "20        1.0     0.257760     0.742240         1.0\n",
       "21        1.0     0.324181     0.675819         1.0\n",
       "22        0.0     0.655833     0.344167         0.0\n",
       "23        0.0     0.670421     0.329579         0.0\n",
       "24        1.0     0.138075     0.861925         1.0\n",
       "25        1.0     0.318148     0.681852         1.0\n",
       "26        0.0     0.662715     0.337285         0.0\n",
       "27        0.0     0.652813     0.347187         0.0\n",
       "28        1.0     0.466128     0.533872         1.0\n",
       "29        0.0     0.345170     0.654830         1.0\n",
       "30        1.0     0.219251     0.780749         1.0\n",
       "31        0.0     0.543380     0.456620         0.0\n",
       "32        0.0     0.679098     0.320902         0.0\n",
       "33        0.0     0.425952     0.574048         1.0\n",
       "34        1.0     0.229939     0.770061         1.0\n",
       "35        1.0     0.087901     0.912099         1.0\n",
       "36        0.0     0.654126     0.345874         0.0\n",
       "37        1.0     0.311646     0.688354         1.0\n",
       "38        0.0     0.430014     0.569986         1.0\n",
       "39        0.0     0.462587     0.537413         1.0\n",
       "40        1.0     0.384066     0.615934         1.0\n",
       "41        0.0     0.676055     0.323945         0.0\n",
       "42        0.0     0.666674     0.333326         0.0\n",
       "43        0.0     0.657220     0.342780         0.0\n",
       "44        1.0     0.236042     0.763958         1.0\n",
       "45        0.0     0.652876     0.347124         0.0\n",
       "46        1.0     0.176722     0.823278         1.0\n",
       "47        1.0     0.639299     0.360701         0.0\n",
       "48        1.0     0.413357     0.586643         1.0\n",
       "49        0.0     0.353809     0.646191         1.0\n",
       "...       ...          ...          ...         ...\n",
       "22494     1.0     0.012420     0.987580         1.0\n",
       "22495     0.0     0.665835     0.334165         0.0\n",
       "22496     1.0     0.271595     0.728405         1.0\n",
       "22497     1.0     0.289419     0.710581         1.0\n",
       "22498     1.0     0.287932     0.712068         1.0\n",
       "22499     0.0     0.640072     0.359928         0.0\n",
       "22500     1.0     0.288896     0.711104         1.0\n",
       "22501     1.0     0.422658     0.577342         1.0\n",
       "22502     1.0     0.222248     0.777752         1.0\n",
       "22503     1.0     0.348491     0.651509         1.0\n",
       "22504     1.0     0.230068     0.769932         1.0\n",
       "22505     1.0     0.636328     0.363673         0.0\n",
       "22506     0.0     0.607841     0.392159         0.0\n",
       "22507     0.0     0.410168     0.589832         1.0\n",
       "22508     0.0     0.648628     0.351372         0.0\n",
       "22509     1.0     0.364853     0.635147         1.0\n",
       "22510     1.0     0.356348     0.643652         1.0\n",
       "22511     0.0     0.629731     0.370269         0.0\n",
       "22512     1.0     0.486851     0.513149         1.0\n",
       "22513     1.0     0.128828     0.871172         1.0\n",
       "22514     0.0     0.441461     0.558539         1.0\n",
       "22515     1.0     0.494665     0.505335         1.0\n",
       "22516     0.0     0.667365     0.332635         0.0\n",
       "22517     1.0     0.394626     0.605374         1.0\n",
       "22518     0.0     0.612999     0.387001         0.0\n",
       "22519     1.0     0.390297     0.609703         1.0\n",
       "22520     1.0     0.245386     0.754614         1.0\n",
       "22521     1.0     0.406724     0.593276         1.0\n",
       "22522     1.0     0.636279     0.363721         0.0\n",
       "22523     0.0     0.666132     0.333868         0.0\n",
       "22524     1.0     0.508207     0.491793         0.0\n",
       "22525     1.0     0.198062     0.801938         1.0\n",
       "22526     0.0     0.663925     0.336075         0.0\n",
       "22527     0.0     0.643164     0.356836         0.0\n",
       "22528     1.0     0.324625     0.675375         1.0\n",
       "22529     0.0     0.547383     0.452617         0.0\n",
       "22530     1.0     0.352351     0.647649         1.0\n",
       "22531     1.0     0.413894     0.586106         1.0\n",
       "22532     0.0     0.650537     0.349463         0.0\n",
       "22533     0.0     0.676151     0.323849         0.0\n",
       "22534     1.0     0.289614     0.710386         1.0\n",
       "22535     0.0     0.655622     0.344379         0.0\n",
       "22536     1.0     0.431428     0.568572         1.0\n",
       "22537     1.0     0.590046     0.409954         0.0\n",
       "22538     1.0     0.331959     0.668041         1.0\n",
       "22539     0.0     0.601730     0.398269         0.0\n",
       "22540     0.0     0.656616     0.343384         0.0\n",
       "22541     1.0     0.630674     0.369326         0.0\n",
       "22542     0.0     0.425283     0.574717         1.0\n",
       "22543     1.0     0.331764     0.668236         1.0\n",
       "\n",
       "[22544 rows x 4 columns]"
      ]
     },
     "execution_count": 24,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# key format: epoch_nof_hidden (e.g. 12_12_3)\n",
    "Train.predictions[\"12_12_3\"]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-24T00:01:49.168234Z",
     "start_time": "2017-07-24T00:01:49.139929Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>Actual</th>\n",
       "      <th>Attack_prob</th>\n",
       "      <th>Normal_prob</th>\n",
       "      <th>Prediction</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.403012</td>\n",
       "      <td>0.596988</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.396761</td>\n",
       "      <td>0.603239</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.011543</td>\n",
       "      <td>0.988457</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.342929</td>\n",
       "      <td>0.657071</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.100866</td>\n",
       "      <td>0.899134</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>5</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.570735</td>\n",
       "      <td>0.429265</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>6</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.104104</td>\n",
       "      <td>0.895896</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>7</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.395437</td>\n",
       "      <td>0.604562</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>8</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.329435</td>\n",
       "      <td>0.670565</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.482691</td>\n",
       "      <td>0.517309</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>10</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.347291</td>\n",
       "      <td>0.652709</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.347256</td>\n",
       "      <td>0.652744</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>12</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.376822</td>\n",
       "      <td>0.623178</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>13</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.370854</td>\n",
       "      <td>0.629146</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>14</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.012452</td>\n",
       "      <td>0.987548</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>15</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.102070</td>\n",
       "      <td>0.897930</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>16</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.516925</td>\n",
       "      <td>0.483075</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>17</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.142840</td>\n",
       "      <td>0.857160</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>18</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.012216</td>\n",
       "      <td>0.987784</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>19</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.402125</td>\n",
       "      <td>0.597875</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>20</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.012140</td>\n",
       "      <td>0.987860</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>21</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.397413</td>\n",
       "      <td>0.602587</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.512977</td>\n",
       "      <td>0.487023</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>23</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.405979</td>\n",
       "      <td>0.594021</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>24</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.501969</td>\n",
       "      <td>0.498031</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>25</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.370833</td>\n",
       "      <td>0.629167</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>26</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.637455</td>\n",
       "      <td>0.362545</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>27</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.525610</td>\n",
       "      <td>0.474390</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>28</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.423820</td>\n",
       "      <td>0.576180</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>29</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.378979</td>\n",
       "      <td>0.621020</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>30</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.143435</td>\n",
       "      <td>0.856565</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>31</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.371802</td>\n",
       "      <td>0.628198</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>32</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.435273</td>\n",
       "      <td>0.564727</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>33</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.368529</td>\n",
       "      <td>0.631471</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>34</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.397406</td>\n",
       "      <td>0.602594</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>35</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.496896</td>\n",
       "      <td>0.503104</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>36</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.453324</td>\n",
       "      <td>0.546676</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>37</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.379357</td>\n",
       "      <td>0.620644</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>38</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.117689</td>\n",
       "      <td>0.882311</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>39</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.068886</td>\n",
       "      <td>0.931114</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>40</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.605587</td>\n",
       "      <td>0.394413</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>41</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.614922</td>\n",
       "      <td>0.385078</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>42</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.401664</td>\n",
       "      <td>0.598336</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>43</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.453789</td>\n",
       "      <td>0.546211</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>44</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.254491</td>\n",
       "      <td>0.745509</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>45</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.329483</td>\n",
       "      <td>0.670517</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>46</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.103922</td>\n",
       "      <td>0.896078</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>47</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.370503</td>\n",
       "      <td>0.629497</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>48</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.245634</td>\n",
       "      <td>0.754366</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>49</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.347161</td>\n",
       "      <td>0.652839</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>...</th>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11800</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.349229</td>\n",
       "      <td>0.650771</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11801</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.221894</td>\n",
       "      <td>0.778106</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11802</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.337832</td>\n",
       "      <td>0.662167</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11803</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.404219</td>\n",
       "      <td>0.595781</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11804</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.421030</td>\n",
       "      <td>0.578970</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11805</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.381074</td>\n",
       "      <td>0.618926</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11806</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.437900</td>\n",
       "      <td>0.562100</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11807</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.393244</td>\n",
       "      <td>0.606756</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11808</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.101455</td>\n",
       "      <td>0.898545</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11809</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.294265</td>\n",
       "      <td>0.705735</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11810</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.470030</td>\n",
       "      <td>0.529970</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11811</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.430750</td>\n",
       "      <td>0.569250</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11812</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.326741</td>\n",
       "      <td>0.673259</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11813</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.397465</td>\n",
       "      <td>0.602535</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11814</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.017277</td>\n",
       "      <td>0.982723</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11815</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.394181</td>\n",
       "      <td>0.605819</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11816</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.407551</td>\n",
       "      <td>0.592449</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11817</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.438897</td>\n",
       "      <td>0.561103</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11818</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.601524</td>\n",
       "      <td>0.398476</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11819</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.052833</td>\n",
       "      <td>0.947167</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11820</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.369175</td>\n",
       "      <td>0.630825</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11821</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.128646</td>\n",
       "      <td>0.871354</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11822</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.438307</td>\n",
       "      <td>0.561693</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11823</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.334591</td>\n",
       "      <td>0.665409</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11824</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.555246</td>\n",
       "      <td>0.444754</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11825</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.614823</td>\n",
       "      <td>0.385177</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11826</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.289127</td>\n",
       "      <td>0.710873</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11827</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.094855</td>\n",
       "      <td>0.905145</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11828</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.344284</td>\n",
       "      <td>0.655716</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11829</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.436262</td>\n",
       "      <td>0.563738</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11830</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.570394</td>\n",
       "      <td>0.429607</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11831</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.204457</td>\n",
       "      <td>0.795543</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11832</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.248856</td>\n",
       "      <td>0.751144</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11833</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.469030</td>\n",
       "      <td>0.530970</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11834</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.201534</td>\n",
       "      <td>0.798466</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11835</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.436682</td>\n",
       "      <td>0.563318</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11836</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.122989</td>\n",
       "      <td>0.877011</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11837</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.425958</td>\n",
       "      <td>0.574042</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11838</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.094290</td>\n",
       "      <td>0.905710</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11839</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.162568</td>\n",
       "      <td>0.837432</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11840</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.567906</td>\n",
       "      <td>0.432094</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11841</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.439834</td>\n",
       "      <td>0.560166</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11842</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.206409</td>\n",
       "      <td>0.793591</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11843</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.349581</td>\n",
       "      <td>0.650419</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11844</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.379782</td>\n",
       "      <td>0.620218</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11845</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.439294</td>\n",
       "      <td>0.560706</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11846</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.636029</td>\n",
       "      <td>0.363971</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11847</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.322738</td>\n",
       "      <td>0.677262</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11848</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.329713</td>\n",
       "      <td>0.670287</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11849</th>\n",
       "      <td>1.0</td>\n",
       "      <td>0.369175</td>\n",
       "      <td>0.630825</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>11850 rows × 4 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "       Actual  Attack_prob  Normal_prob  Prediction\n",
       "0         1.0     0.403012     0.596988         1.0\n",
       "1         1.0     0.396761     0.603239         1.0\n",
       "2         1.0     0.011543     0.988457         1.0\n",
       "3         0.0     0.342929     0.657071         1.0\n",
       "4         1.0     0.100866     0.899134         1.0\n",
       "5         1.0     0.570735     0.429265         0.0\n",
       "6         1.0     0.104104     0.895896         1.0\n",
       "7         1.0     0.395437     0.604562         1.0\n",
       "8         0.0     0.329435     0.670565         1.0\n",
       "9         0.0     0.482691     0.517309         1.0\n",
       "10        1.0     0.347291     0.652709         1.0\n",
       "11        1.0     0.347256     0.652744         1.0\n",
       "12        1.0     0.376822     0.623178         1.0\n",
       "13        1.0     0.370854     0.629146         1.0\n",
       "14        1.0     0.012452     0.987548         1.0\n",
       "15        1.0     0.102070     0.897930         1.0\n",
       "16        1.0     0.516925     0.483075         0.0\n",
       "17        1.0     0.142840     0.857160         1.0\n",
       "18        1.0     0.012216     0.987784         1.0\n",
       "19        1.0     0.402125     0.597875         1.0\n",
       "20        1.0     0.012140     0.987860         1.0\n",
       "21        1.0     0.397413     0.602587         1.0\n",
       "22        1.0     0.512977     0.487023         0.0\n",
       "23        1.0     0.405979     0.594021         1.0\n",
       "24        1.0     0.501969     0.498031         0.0\n",
       "25        1.0     0.370833     0.629167         1.0\n",
       "26        1.0     0.637455     0.362545         0.0\n",
       "27        1.0     0.525610     0.474390         0.0\n",
       "28        0.0     0.423820     0.576180         1.0\n",
       "29        1.0     0.378979     0.621020         1.0\n",
       "30        1.0     0.143435     0.856565         1.0\n",
       "31        1.0     0.371802     0.628198         1.0\n",
       "32        0.0     0.435273     0.564727         1.0\n",
       "33        0.0     0.368529     0.631471         1.0\n",
       "34        1.0     0.397406     0.602594         1.0\n",
       "35        0.0     0.496896     0.503104         1.0\n",
       "36        1.0     0.453324     0.546676         1.0\n",
       "37        1.0     0.379357     0.620644         1.0\n",
       "38        1.0     0.117689     0.882311         1.0\n",
       "39        1.0     0.068886     0.931114         1.0\n",
       "40        0.0     0.605587     0.394413         0.0\n",
       "41        1.0     0.614922     0.385078         0.0\n",
       "42        1.0     0.401664     0.598336         1.0\n",
       "43        0.0     0.453789     0.546211         1.0\n",
       "44        1.0     0.254491     0.745509         1.0\n",
       "45        1.0     0.329483     0.670517         1.0\n",
       "46        1.0     0.103922     0.896078         1.0\n",
       "47        1.0     0.370503     0.629497         1.0\n",
       "48        1.0     0.245634     0.754366         1.0\n",
       "49        1.0     0.347161     0.652839         1.0\n",
       "...       ...          ...          ...         ...\n",
       "11800     1.0     0.349229     0.650771         1.0\n",
       "11801     1.0     0.221894     0.778106         1.0\n",
       "11802     0.0     0.337832     0.662167         1.0\n",
       "11803     1.0     0.404219     0.595781         1.0\n",
       "11804     1.0     0.421030     0.578970         1.0\n",
       "11805     1.0     0.381074     0.618926         1.0\n",
       "11806     0.0     0.437900     0.562100         1.0\n",
       "11807     1.0     0.393244     0.606756         1.0\n",
       "11808     1.0     0.101455     0.898545         1.0\n",
       "11809     1.0     0.294265     0.705735         1.0\n",
       "11810     1.0     0.470030     0.529970         1.0\n",
       "11811     1.0     0.430750     0.569250         1.0\n",
       "11812     0.0     0.326741     0.673259         1.0\n",
       "11813     1.0     0.397465     0.602535         1.0\n",
       "11814     1.0     0.017277     0.982723         1.0\n",
       "11815     1.0     0.394181     0.605819         1.0\n",
       "11816     1.0     0.407551     0.592449         1.0\n",
       "11817     0.0     0.438897     0.561103         1.0\n",
       "11818     0.0     0.601524     0.398476         0.0\n",
       "11819     1.0     0.052833     0.947167         1.0\n",
       "11820     1.0     0.369175     0.630825         1.0\n",
       "11821     1.0     0.128646     0.871354         1.0\n",
       "11822     1.0     0.438307     0.561693         1.0\n",
       "11823     1.0     0.334591     0.665409         1.0\n",
       "11824     1.0     0.555246     0.444754         0.0\n",
       "11825     0.0     0.614823     0.385177         0.0\n",
       "11826     1.0     0.289127     0.710873         1.0\n",
       "11827     1.0     0.094855     0.905145         1.0\n",
       "11828     1.0     0.344284     0.655716         1.0\n",
       "11829     1.0     0.436262     0.563738         1.0\n",
       "11830     1.0     0.570394     0.429607         0.0\n",
       "11831     1.0     0.204457     0.795543         1.0\n",
       "11832     1.0     0.248856     0.751144         1.0\n",
       "11833     0.0     0.469030     0.530970         1.0\n",
       "11834     1.0     0.201534     0.798466         1.0\n",
       "11835     0.0     0.436682     0.563318         1.0\n",
       "11836     1.0     0.122989     0.877011         1.0\n",
       "11837     1.0     0.425958     0.574042         1.0\n",
       "11838     1.0     0.094290     0.905710         1.0\n",
       "11839     1.0     0.162568     0.837432         1.0\n",
       "11840     0.0     0.567906     0.432094         0.0\n",
       "11841     0.0     0.439834     0.560166         1.0\n",
       "11842     1.0     0.206409     0.793591         1.0\n",
       "11843     1.0     0.349581     0.650419         1.0\n",
       "11844     1.0     0.379782     0.620218         1.0\n",
       "11845     0.0     0.439294     0.560706         1.0\n",
       "11846     0.0     0.636029     0.363971         0.0\n",
       "11847     1.0     0.322738     0.677262         1.0\n",
       "11848     1.0     0.329713     0.670287         1.0\n",
       "11849     1.0     0.369175     0.630825         1.0\n",
       "\n",
       "[11850 rows x 4 columns]"
      ]
     },
     "execution_count": 25,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "Train.predictions_[\"12_12_3\"]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-24T00:01:49.176946Z",
     "start_time": "2017-07-24T00:01:49.169717Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "df = Train.predictions[\"12_12_3\"].dropna()\n",
    "df_ = Train.predictions_[\"12_12_3\"].dropna()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-24T00:01:49.184999Z",
     "start_time": "2017-07-24T00:01:49.178303Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "from sklearn import metrics as me\n",
    "def get_score(y_true, y_pred):\n",
    "    f1 = me.f1_score(y_true, y_pred)\n",
    "    pre = me.precision_score(y_true, y_pred)\n",
    "    rec = me.recall_score(y_true, y_pred)\n",
    "    acc = me.accuracy_score(y_true, y_pred)\n",
    "    return {\"F1 Score\":f1, \"Precision\":pre, \"Recall\":rec, \"Accuracy\":acc}\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-24T00:01:49.233147Z",
     "start_time": "2017-07-24T00:01:49.186537Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>Accuracy</th>\n",
       "      <th>F1 Score</th>\n",
       "      <th>Precision</th>\n",
       "      <th>Recall</th>\n",
       "      <th>Scenario</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>0.830509</td>\n",
       "      <td>0.857121</td>\n",
       "      <td>0.823940</td>\n",
       "      <td>0.893088</td>\n",
       "      <td>Train+/Test+</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>0.730211</td>\n",
       "      <td>0.839242</td>\n",
       "      <td>0.819021</td>\n",
       "      <td>0.860487</td>\n",
       "      <td>Train+/Test-</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "   Accuracy  F1 Score  Precision    Recall      Scenario\n",
       "0  0.830509  0.857121   0.823940  0.893088  Train+/Test+\n",
       "1  0.730211  0.839242   0.819021  0.860487  Train+/Test-"
      ]
     },
     "execution_count": 28,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from sklearn import metrics as me\n",
    "\n",
    "scores = get_score(df.loc[:,'Actual'].values.astype(int),\n",
    "                df.loc[:,'Prediction'].values.astype(int))\n",
    "scores.update({\"Scenario\":\"Train+/Test+\"})\n",
    "score_df = pd.DataFrame(scores, index=[0])\n",
    "\n",
    "scores = get_score(df_.loc[:,'Actual'].values.astype(int),\n",
    "                df_.loc[:,'Prediction'].values.astype(int))\n",
    "scores.update({\"Scenario\":\"Train+/Test-\"})\n",
    "\n",
    "score_df = score_df.append(pd.DataFrame(scores, index=[1]))\n",
    "\n",
    "score_df"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-24T00:01:49.240140Z",
     "start_time": "2017-07-24T00:01:49.234574Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "Actual\n",
       "0.0     9711\n",
       "1.0    12833\n",
       "Name: Actual, dtype: int64"
      ]
     },
     "execution_count": 29,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "df.groupby(by=\"Actual\").Actual.count()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-24T00:01:49.531254Z",
     "start_time": "2017-07-24T00:01:49.241503Z"
    }
   },
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAbYAAAGhCAYAAAAN2pFTAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3XmcTvX7x/HXNTP2LVuRnciWfd9SWYpCKtGqlG/7rtUv\nWlTf9rR+pUVaJC1UltCC7ERFCtmJUPYw5vr9cR/TzGCMmduMOd7P7+N+uM/n/pxzrtt3cs31OZ/z\nOebuiIiIhEVMVgcgIiISTUpsIiISKkpsIiISKkpsIiISKkpsIiISKkpsIiISKkpsIiISKkpsIiIS\nKkpsIiISKnFZHYCIiERHbMFy7vG7onY83/XnOHc/O2oHzCRKbCIiIeHxu8h1areoHe+feS8Xi9rB\nMpESm4hIaBiYrjDpb0BEREJFFZuISFgYYJbVUWQ5JTYRkTDRUKSGIkVEJFxUsYmIhImGIpXYRETC\nQ7MiQUORIiISMqrYRETCREORSmwiIqFhaCgSDUWKiEjIqGITEQkN01AkSmwiIuGioUgNRYqISLio\nYhMRCRMNRSqxiYiEh27QBg1FiohIyKhiExEJCz22BlDFJiIiIaOKTUQkTHSNTYlNRCQ8NHkENBQp\nIiIho4pNRCRMYjR5RIlNRCQstLo/oKFIEREJGVVsIiJhovvYlNhERMJDsyJBQ5EiIhIyqthERMJE\nQ5FKbCIioaKhSA1FiohIuKhiExEJCzMNRaKKTUREQkYVm4hImOgamxKbiEioaChSQ5EiIhIuSmwS\nSmaWx8w+N7MtZvZRBo5zqZl9Fc3YsoqZtTSzX7M6DjmagpVHovXKprJv5BIKZnaJmc02s+1mts7M\nxphZiygc+kLgJKCou1+U3oO4+3vu3i4K8RxVZuZmdkpqfdx9srufmlkxSRbZPzMyGq9sSolNsoyZ\n3QE8DzxGJAmVBV4GOkXh8OWA39w9PgrHyvbMTNfT5bihxCZZwswKAQ8DN7r7J+6+w933uvsX7n53\n0CeXmT1vZmuD1/Nmliv4rLWZrTazO81sQ1DtXRV89hDwIHBxUAn2MrP+ZvZukvOXD6qcuGC7p5n9\nbmbbzGyZmV2apH1Kkv2amdmsYIhzlpk1S/LZt2b2iJl9HxznKzMrdojvvz/+u5PE38XMOpjZb2a2\n2czuT9K/kZlNM7O/g74vmVnO4LNJQbf5wfe9OMnx7zGzP4C39rcF+1QKzlEv2D7ZzP40s9YZ+j9W\nstb+57FpKFIkSzQFcgOfptLnAaAJUAeoDTQC+ib5vARQCCgF9AJeNrPC7t6PSBX4obvnd/c3UgvE\nzPIBA4Fz3L0A0AyYd5B+RYAvg75FgWeBL82saJJulwBXAScCOYG7Ujl1CSJ/B6WIJOLXgcuA+kBL\n4P/MrELQdx9wO1CMyN/dWcANAO7eKuhTO/i+HyY5fhEi1WvvpCd296XAPcC7ZpYXeAsY4u7fphKv\nHPN0jQ2U2CTrFAU2Hmao8FLgYXff4O5/Ag8Blyf5fG/w+V53Hw1sB9J7DSkBqGlmedx9nbsvOEif\njsBidx/q7vHu/gGwCDgvSZ+33P03d98FDCeSlA9lLzDA3fcCw4gkrRfcfVtw/oVEEjruPsfdpwfn\nXQ78Dzg9Dd+pn7vvDuJJxt1fB5YAM4CSRH6REMn2lNgkq2wCih3m2s/JwIok2yuCtsRjpEiMO4H8\nRxqIu+8ALgauA9aZ2ZdmVjUN8eyPqVSS7T+OIJ5N7r4veL8/8axP8vmu/fubWRUz+8LM/jCzrUQq\n0oMOcybxp7v/c5g+rwM1gRfdffdh+kp2oMkjSmySZaYBu4EuqfRZS2QYbb+yQVt67ADyJtkukfRD\ndx/n7m2JVC6LiPyDf7h49se0Jp0xHYlXicRV2d0LAvcTuaKSGk/tQzPLT2TyzhtA/2CoVbI7DUUq\nsUnWcPctRK4rvRxMmshrZjnM7Bwzez
Lo9gHQ18yKB5MwHgTePdQxD2Me0MrMygYTV+7b/4GZnWRm\nnYNrbbuJDGkmHOQYo4EqwS0KcWZ2MVAd+CKdMR2JAsBWYHtQTV6f4vP1QMUjPOYLwGx3v4bItcPX\nMhylyDFAiU2yjLs/A9xBZELIn8Aq4Cbgs6DLo8Bs4EfgJ2Bu0Jaec40HPgyONYfkySgmiGMtsJnI\ntauUiQN33wScC9xJZCj1buBcd9+YnpiO0F1EJqZsI1JNfpji8/7AkGDWZLfDHczMOgNn8+/3vAOo\nt382qGRjGorE3FMdrRARkWwipnB5z9W67+E7ptE/n107x90bRO2AmUQVm4iIhIpWIxARCZNsPIQY\nLarYREQkVFSxiYiEiKliU2LLVeAEz1P05MN3FDmEsifkyeoQJJv7af7cje5ePKPHMZTYQImNPEVP\npnXfd7I6DMnGBnY9LatDkGyuXNHcKVe0yRbM7E0it8BscPeaQVsRIrejlAeWA93c/a/gs/uIrOu6\nD7jF3ccF7fWBt4E8RO4XvdXdPVj0/B0i66duAi4OlpRLla6xiYiEhUX5dXhvE7kfMql7gYnuXhmY\nGGxjZtWB7kCNYJ9XzCw22OdV4FqgcvDaf8xewF/ufgrwHPDftASlxCYiEhqGWfReh+Puk4gsapBU\nZ2BI8H4I/y6b1xkYFizKvYzIAtyNzKwkUDBY5NuJVGhdDnKsEcBZlobAlNhERORQilnkCff7X70P\nvwsnufu64P0fRB4iDJHFwlcl6bc6aCsVvE/ZnmyfYMHzLUSeDJKq4/4am4hImER58sjGjKw8Elwn\ny/TlrVSxiYiESGYORR7C+mB4keDPDUH7GqBMkn6lg7Y1wfuU7cn2CR5xVYjIJJJUKbGJiEg0jQKu\nDN5fCYxM0t7dzHIFT4avDMwMhi23mlmT4PrZFSn22X+sC4GvPQ0LHGsoUkQkRDLzPjYz+wBoTeRa\n3GqgH/AEMNzMehF5EG83AHdfYGbDiTwZPh64McmDdm/g3+n+Y4IXRJ4VONTMlhCZpNI9LXEpsYmI\nhEXap+lHhbv3OMRHZx2i/wBgwEHaZxN5knvK9n+Ai440Lg1FiohIqKhiExEJCSNDkz5CQ4lNRCRE\nlNg0FCkiIiGjik1EJERUsaliExGRkFHFJiISIqrYlNhERMIjk+9jO1ZpKFJEREJFFZuISIhoKFKJ\nTUQkNHSDdoSGIkVEJFRUsYmIhIgqNiU2EZFwUV7TUKSIiISLKjYRkbAwDUWCEpuISKgosWkoUkRE\nQkYVm4hIiKhiU2ITEQkN3aAdoaFIEREJFVVsIiJhooJNFZuIiISLKjYRkbDQfWyAEpuISKgosWko\nUkREQkYVm4hIiKhiU2ITEQkX5TUNRYqISLioYhMRCRENRSqxiYiEhpmW1AINRYqISMgosYlImqxd\ns4qLO7fjrKZ1aNOsLm/+76UD+gx6+XnKFc3N5k0bk7WvWb2SamWL8r+Xnkts+/zTj2jfsgFtmtXl\n8f4PHPX4jxf7q7ZovLIrJTYRSZPY2Dj6PvxfJk6bx2fjJvHOG6/x26JfEj9fu2YVk7+ZQKnSZQ7Y\n95G+99D6rPaJ239t3sRj/e7j/U/HMGHqD/y54Q+mfPd1pnyPsFNiU2ITkTQ6qURJTqtdF4D8BQpw\nSuWqrF+3JvHzhx+4m/v6P3bAP4jjvhxFmXLlqVK1WmLbyuXLKF/xFIoWKw5Ai9PPZMznn2XCt5Dj\ngRKbiByxVSuXs+CnedSp3wiAr0Z/TomSJ1O9Zq1k/XZs386rA5/htj7JhxrLV6zE70sWs2rlcuLj\n4xk3+nPWrV2dafGHmkXxlU1pVqSIHJEd27dzXc8ePDjgaQoULMiunTt5+bknGfrxFwf0fe7JR7nm\n+pvJlz9/svZCJxRmwNMDuanX5VhMDPUbNWHlst8z6ytIyCmxiUia7d27l+t6dqfLhd0557wuAKxY\n/j
urVi7nnFYNAVi3dg0dz2jCyPFTmDdnJmNGfcLj/e9n65YtWEwMuXLlpue119Pm7I60ObsjAO8P\nGUxsbGyWfa8wyc7XxqJFiU1E0sTdufuW/3BKlapce8Otie1Vq9dk7q+rEreb16nC5xOnUqRoMUZ8\n+e+EkOf++wh58+Wn57XXA7Dxzw0UK34iW/7+i6FvDuLlN97LvC8TVnpsDaDEJiJpNHvGVD4Z/j5V\nq9fknNMj19b69H2YM9uena7jPXT/nSz8+ScAbu1zPxVPqRy1WOX4psQmImnSsElzVmz657D9vp/3\n20Hbb7/n/5Jtv/j60KjEJf8yQAWbEpuISIhk7/vPokXT/UVEJFRUsYmIhIgKNiW248rJhXLR58xK\nidsnFcjFB3PW8PmCDVzZqDQNyxYiPsH5Y+tuXpy0nB179gFQrkgerm9ejrw5Y3F37hr5C4Zx91kV\nKVEwFwkOs1b+zdBZaw51aslCzetUIV/+AonT6R956gUaNGp6yP7Vyhbll5WbMnTOO2+8hulTJ1Ow\nYCEsJoZHnnye+g2bHNExxo/5gsW//sINt/Vh3JejqFCpcuLqJc88/hCNm7agReuzMhRnGGkoUont\nuLJ2y25u/3QhADEGb/SozfQVfwMwf81Whs5aTYLDFQ1LcUHtErwzaw0xBre3rsDz3y5j+eZdFMgV\ny74EJy7G+Oyn9fy8bhtxMcbDHapQr3RB5q7empVfUQ5h2MhxFClaLFPPef9Dj9OxU1cmfTOe+++4\niXGTZx/R/m3POZe255wLwFejR3Fm+w6Jie3O+/pFPV4JD11jO07VOrkgf2zbzZ/b9wAwb81WEjzy\n2a8bdlA0X04A6pYqxPLNu1i+eRcA23bvI8Fhz74Efl63DYD4BGfpxp2J+8ixb8f27fTocjYdzmhC\nuxb1+Wr05wf0Wf/HOi469yzOOb0RbZvXY+a0KQBM+mY8XdqfToczmnD9VZewY/v2VM/VqGlLli9b\nCsCCn+bTpV0r2rdsQO/Lu7Hl778AeOt/L3NW0zq0b9mAm665HICP3n+H/7v7NmbPnMb4sV/yWL/7\nOOf0RqxYtpQ7b7yGL0d9wrcTv+L6qy5JPNe0Kd9xVY/z0xVnKFhkKDJar+xKie041aJiESYvPfhw\nU5tTizF39RYgMnyJQ7+zK/NMl2qcX6vEAf3z5YylYdkT+HGtqrVjVffO7Tnn9EZ0btsSgFy5czPo\nneGM/mY6w0aO49EH78Hdk+0z8uMPaXVGW8Z8N5Oxk2ZRvWZtNm/ayIvPPMH7n4xm9DfTqVWnHoNf\nfSHVc08c9yVVq9UE4I4benFvv0cZN3k2p1avwfNPDgDglReeZvS3Mxg3eTYDnnkx2f4NGjWl7dkd\nuf+hxxnz3UzKVfh3OL3F6Wcyb85Mdu7YAcAXn47gvPMvSlecYWBATIxF7ZVdaSjyOBQXYzQqV4ih\nsw9cdPbCOiXZl+B8t2QzEPmPpFqJ/Nz12S/sjk/g4Q5VWLpxBz+ujVRrMQZ3nFGRLxesZ/22PZn6\nPSTtUg5FujtPPvogM6dNISYmhj/WreXPDes58aR/f3GpXbc+fW75D/Hxe2nXoRM1TqvNxHGTWfzr\nIi7ocAYAe/bsoV7Dxgc952P97uOlZ56gSNFiPDnwNbZu3cLWLX/TpHkrAC7sfhnXX30pANVq1OTW\n//SkXYfzaN+hU5q/V1xcHKef1Y4J476kQ6eufD1+DPf1f4wZU9Mep4SPEttxqF7pQvy+cSdbdsUn\naz+zclEalCnEg6P/vcF20449LFi3jW27I33nrtpCxaJ5ExPbDS3Ks27rP3y+YEPmfQHJsM8++oDN\nmzbyxdfTyJEjB83rVGH37uQ3Xzdu1pKPPp/A1+PHcNdN13LN9bdQ6ITCtGx9Zppurt5/jW2/rVu3\nHLLvW8M+Y8bUyUwYN5qXnv0vX02Zk+bvct75FzFk8GucULgwp9Wp
T/4CBXD3NMcZNtl5CDFaNBR5\nHGpZqQiTlm5O1la3dEHOr1WCx8YvYc++hMT2H1ZvpVyRPOSMjSHGoEbJAqz6O/IP4CX1TyZfzlje\nmLYKyV62bdtK0WLFyZEjB1Mnf8vqVSsP6LN61QqKnXgSPa7oxcWXXcXPP86jboNGzJ4xjeW/R66Z\n7dyxg9+XLE7TOQsWLEShEwonXqv7ZPj7NGnWkoSEBNauWUWzlq25r98Atm3dyo4dya+H5ctfgB3b\nth30uE2at2LBjz/wwTtv0anrRQAZijO704NGVbEdd3LFxVC7VEFenbIiWXvvpmXJERvDQ+dUAeDX\nDdt57fuV7Nizj1E/r+fpLtVwh7mrtzBn1RaK5s1Bt7ons+rvXTx7fnUAvly4gQm/bsz07yRHrsuF\n3bn6kgto16I+terUo1LlUw/oM33KJP730nPE5chBvnz5ePaVNyharDhPv/Q6N197BXv27Abgrvv7\np3mdx2deHswDd97Mrl07KVuuAk+/NIh9+/Zx23VXsW3rVtydq3rfQKFCJyTb77yuF3HvbTfw1uuv\n8Npb7yf7LDY2ljPbdWDEsKE8+/JggAzHKdmbpbxgfLw5oXx1b933nawOQ7KxgV1Py+oQJJsrVzT3\nHHdvkNHj5Dm5ip/S6+VohATAz4+2i0pcmU1DkSIiEioaihQRCYnI6v7Z99pYtCixHcOe7FSVHLEx\n5M8VS87YGDbv3AvA4+OXsGF79KbWlyiYi9e6ncZr369g7C9/AnBd87L8sn574rT/aMifK5bmFYow\nblHkHMXy5aBn4zI8/fXvUTuHHF7nti3Zs2c3f/+1mX/++YcSJU8GYNDQ4ZQpWz7q53tqQD+KFC1G\nr+tu5tb/9KRDp66075h8Sv+t/+nJ7JnTKFiwEAD58udP9pBSSavsPekjWpTYjmF3j1oERKbhVyqW\nj9enHThzDSL3kiVk8FLpXzv3cl7Nk/hq0Z8ZPtah5M8Vx9nViicmto079iqpZYGR4ycDkZU9fpw3\nl0eefD6LI4p48NGnDkh4ScXHxxMXF3fI7bTuJ+Gn/7ezoRiDoZfV4evFmzjt5AK8OmUFfc6qxK0f\nL2DHnn1UKZ6PSxuUot+Y38gdF0PvZmUpfUIe4mKMD+auYdbKA+8n+nvXXpZu3EHrU4ry9eLkK5KU\nLJiL3s3KUjB3HP/sTeDlyctZu3U3JQvm4vbWFcgVF8PMlVvoUK04lw6dR54cMdzX9hTy5YwlNsZ4\nd9YaZq/awhUNS3Fyodw8d3515q7ewvhfN3LPWZW4/dOFPN25Gs9++ztrt0RmsD127qm8PnUl67bu\nTlP8knHvDxnM70uX0PfhJwAY+uYgVq5YxqVXXsM1l11I1eo1Wfjzj1StXpNnXx5M7jx5mD93NgP6\n3cuOHTsoWqw4z7z0OsVPPClqMT01oB9rV69ixfLfKVO2PE1atOLrcWPYum0LMRbDe5+M5tEH72Xy\nNxMwM269+wE6durKlG8n8uKzT5A3X35WLPudr6fPj1pMxzoVbEps2Va+XHEs+GMbb0xP/R6yi+ud\nzNzVWxg4aTn5csbyZOdqzFuzlb37DizLPp7/B/e3PYVvUiS2G1qU4+XJK/hj226qnpSf3s3K0n/s\nYq5tWpbPflrP1GV/0aF68cT+e+Kdx8cvYdfeBArljuPx86oye9UW3pm1hpIFcycuxFyiYK7Efab8\nvpkWFYowfN46iubNQYFccSzbvIsrG5VOc/ySMeed340OZzTm3gcfJS4ujo/ef4dngunzi3/9hSdf\neI16DRtz+/VX896QwVx2VW/6338Xb7w3giJFi/HpRx/wzOMP8cRzr6Tr/A/37cPzTz4KQNXqNXnu\n1TcBWLr4N4Z/MYHcuXPzwdA3WfDTPMZ8N5NCJxTm808/Yslvixg7aRabNv5JpzbNady0BQA/zpvL\nhKk/UKp02Sj87WQfGoo8ionN
zBx41t3vDLbvAvK7e/+jdc6DxPA28IW7j8isc2aWvfsSmL7878P2\nq1OqIPVKF+SC2iUByBlrFM+Xk7Vbdx/Qd93W3SzbtJMWFYsktuXLGUuVE/NzT5t/1+eLCf7DqXJi\nPh4ZF7npddLSzVxavxQQ+Y3xioalqVYiP+5QLF9OCuRK/Uft+2V/8UC7Uxg+bx0tKhXh+2V/HXH8\nkjEFChakcdMWfDthHGXLVyAmNpbKp1Zl+e9LKVOufOKSVOd368H7Q96kSfNWLF60kEu7dgBg3759\nlDi5VLrPf6ihyLbnnEvu3LkTt1ue0YZCJxQGYNb0qXTu2o3Y2FhOPKkEDZs048d5c8mZIwf1GjQ+\n7pKaRBzNim030NXMHnf3I75r18zi3D3+8D2PT3viE5JtJyR44hBEzrjkd3E8Pn4pf2xLWyL4aN46\n7jijIr9t2JHYtu2f+MQqKy1aVy5K3pyx3PHpQhIcBveoRc7Y1H+L/HP7Hv7Zm0DpE3LTvEIRBk5a\nlq74JWMuvvwqBr8ykNJly9HtkssT21NWAWaGu1O1Rs2jPskjb758ybfz5jtEzxT7pbFfqGTzVfmj\n5WjexxYPDAJuT/mBmZU3s6/N7Eczm2hmZYP2t83sNTObATxpZv3NbIiZTTazFWbW1cyeNLOfzGys\nmeUI9nvQzGaZ2c9mNsiOw1p8w/Y9VCoW+Q+5afl/V22Yt2YrHWucmLhdoWieVI+z6u9/+GPbbuqV\nKQjAjj372LxzL43LRY5pQPkikWMs/nMHTYJztUxR5W3ZFU+CQ+1SBSkWPM5m19595Mlx6B+5Kb9v\n5sLaJckRa6wOlu060vglYxo2bsbK5b8zeuTHnHv+RYntq1YsZ/7cyPPURo74kIZNmlH51Gr8sW4t\n8+bMAiILDf+2KO2/AEVDo6bNGfXpRyQkJPDnhvXMnjGNWnXqZWoMx5L90/2P9yW1jvYN2i8Dl5pZ\noRTtLwJD3L0W8B4wMMlnpYFm7n5HsF0JOBPoBLwLfOPupwG7gI5Bn5fcvaG71wTyAOcelW9zDBs2\ndy3/aVaWpzpXIz7JtMZhc9eSKy6GF7pWZ+AFNehe7+TDHuujH9ZRPP+/17+e+WYpZ1crznPnV2fg\nhTVoUDaSzF6ftpLza5Xk+a7VObFArsQnbn+7eBNVT8rHC12r07JiYdZsiSSpLbviWbpxJy90rc7l\nDQ8csvp+2V+0TDIMmd74JWPOOe98GjVtkTj1HuCUKlUZ/OoLnNmkNrt27eKSK3qRK1cuXn3rAx75\nv7tp37IBHVo35oc5M9N93of79uGc0xslvvbt23fYfTp06kqlyqfSvmUDLu3agb6PPkmx4icedj8J\nt6O2pJaZbXf3/Gb2MLCXSCLK7+79zWwjUNLd9wZV1zp3LxZcE/vG3YcEx+gP7HX3AWYWExwjt7t7\ncNzN7v68mV0A3A3kBYoAL7r7E4e6xmZmvYHeAHmKlKjf7r8HPmRRDi9XXAy7gyHR008pQpNyhfnv\nxKVZHFXmC9uSWldcdB433NYn8fEyy39fyvVX9WDMd+lPWpK6aC2pla/UqV7t+teiERIAc/7vzGy5\npFZmzIp8HpgLvJXG/jtSbO8GcPcEM9vr/2biBCDOzHIDrwAN3H1VkAxzkwp3H0RkmJQTylfX9Lp0\nqlw8H72alMEMtu/ex4tJrotJ9vPX5k10ad+KWnXqJSY1yX6y8xBitBz1xObum81sONALeDNongp0\nB4YClwKTM3CK/Ulso5nlBy4EQjcL8lj087ptRzSpRI5thYsU5btZCw5oL1+xkqo1yVYy6z62Z4Cb\nkmzfDLxlZn2AP4Gr0ntgd//bzF4Hfgb+AGZlJFARkexMBdtRTGzunj/J+/VErn/t315BZEJIyn16\nptjun8ox+yd53xfoe7jjiYiEmmkoEvTYGhERCRktqSUiEhKR+9iyOoqsp4pNRETSzcxuN7MFwQ
IZ\nH5hZbjMrYmbjzWxx8GfhJP3vM7MlZvarmbVP0l4/WHxjiZkNzMhCG0psIiKhEb1VR9KSV8ysFHAL\nkdutagKxRGa83wtMdPfKwMRgGzOrHnxeAzgbeMXMYoPDvQpcC1QOXmen929BiU1EJETMovdKozgg\nj5nFEZkkuBboDAwJPh8CdAnedwaGuftud18GLAEamVlJoKC7Tw/uVX4nyT5HTIlNREQOpZiZzU7y\n6p30Q3dfAzwNrATWAVvc/SvgJHdfF3T7A9j/kL5SQNJnba0O2koF71O2p4smj4iIhEiUp/tvTG1J\nreDaWWegAvA38JGZXZa0T7AEYqau8KTEJiISFpn/2Jo2wDJ3/xPAzD4BmgHrzayku68Lhhk3BP3X\nAGWS7F86aFsTvE/Zni4aihQRkfRaCTQxs7zBLMazgF+AUcCVQZ8rgZHB+1FAdzPLZWYViEwSmRkM\nW241sybBca5Iss8RU8UmIhIS+5/HllncfYaZjSCy0H088AORBebzA8PNrBewAugW9F8QrB28MOh/\no7vvfz7RDcDbRB49NiZ4pYsSm4hIiGT2klru3g/ol6J5N5Hq7WD9BwADDtI+G6gZjZg0FCkiIqGi\nik1EJES0pJYSm4hIqGh1fw1FiohIyKhiExEJi8y/j+2YpIpNRERCRRWbiEhIGGlblT/slNhEREJE\neU1DkSIiEjKq2EREQiRGJZsSm4hImCivaShSRERCRhWbiEhImGnlEVBiExEJlRjlNQ1FiohIuKhi\nExEJEQ1FKrGJiISK8pqGIkVEJGRUsYmIhIQRWS/yeKfEJiISIpoVqaFIEREJGVVsIiJhYXpsDahi\nExGRkFHFJiISIirYlNhERELD0GNrQEORIiISMqrYRERCRAWbEpuISKhoVqSGIkVEJGRUsYmIhETk\nQaNZHUXWU2ITEQkRzYrUUKSIiISMKjYRkRBRvZZKYjOzgqnt6O5box+OiIhkhGZFpl6xLQCc5L8A\n7N92oOxRjEtERCRdDpnY3L1MZgYiIiIZE1lSK6ujyHppmjxiZt3N7P7gfWkzq390wxIRkSMWPLYm\nWq/s6rCJzcxeAs4ALg+adgKvHc2gRERE0istsyKbuXs9M/sBwN03m1nOoxyXiIikQzYutKImLUOR\ne80shsiEEcysKJBwVKMSERFJp7RUbC8DHwPFzewhoBvw0FGNSkRE0iU7XxuLlsMmNnd/x8zmAG2C\npovc/eejG5aIiBwpzYqMSOvKI7HAXiLDkVqGS0REjllpmRX5APABcDJQGnjfzO472oGJiMiR03T/\ntFVsVwASHTWhAAAgAElEQVR13X0ngJkNAH4AHj+agYmIyJHLvukoetIyrLiO5AkwLmgTERE55qS2\nCPJzRK6pbQYWmNm4YLsdMCtzwhMRkbQy0/PYIPWhyP0zHxcAXyZpn370whERkYxQXkt9EeQ3MjMQ\nERGRaDjs5BEzqwQMAKoDufe3u3uVoxiXiIikQ3aezRgtaZk88jbwFpHJNucAw4EPj2JMIiKSTmbR\ne2VXaUlsed19HIC7L3X3vkQSnIiIyDEnLfex7Q4WQV5qZtcBa4ACRzcsERE5UoZpViRpS2y3A/mA\nW4hcaysEXH00gxIREUmvtCyCPCN4u41/HzYqIiLHmmx+bSxaUrtB+1OCZ7AdjLt3PSoRiYhIumlW\nZOoV20uZFkUWqlQ0L8N6NsjqMCQbK9zwpqwOQUSSSO0G7YmZGYiIiGScniuW9uexiYjIMc7QUCQo\nuYuISMikuWIzs1zuvvtoBiMiIhkTo4ItTU/QbmRmPwGLg+3aZvbiUY9MRESOWIxF75VdpWUociBw\nLrAJwN3nA2cczaBERETSKy1DkTHuviLFBcl9RykeERFJp8jixdm41IqStCS2VWbWCHAziwVuBn47\numGJiEh6ZOchxGhJy1Dk9cAdQFlgPdAkaBMRETnmpGWtyA
1A90yIRUREMkgjkWl7gvbrHGTNSHfv\nfVQiEhGRdDHQY2tI2zW2CUne5wbOB1YdnXBEREQy5rDX2Nz9wySvIUBXoP7RD01ERI5UTBRfaWFm\nJ5jZCDNbZGa/mFlTMytiZuPNbHHwZ+Ek/e8zsyVm9quZtU/SXt/Mfgo+G2gZmN6ZniW1KgAnpfeE\nIiISKi8AY929KlAb+AW4F5jo7pWBicE2ZladyJyNGsDZwCvBbHuAV4FrgcrB6+z0BpSWa2x/8e81\nthhg8/4gRUTk2JKZl9jMrBDQCugJ4O57gD1m1hloHXQbAnwL3AN0BoYFyzMuM7MlQCMzWw4UdPfp\nwXHfAboAY9ITV6qJLSgFawNrgqYEdz/kw0dFRCTrmFm0J48UM7PZSbYHufugJNsVgD+Bt8ysNjAH\nuBU4yd3XBX3+4N9RvlLA9CT7rw7a9gbvU7anS6qJzd3dzEa7e830nkBERLKtje6e2pOY44B6wM3u\nPsPMXiDFiF6QRzK1IErLNbZ5Zlb3qEciIiIZFllWKzqvNFgNrHb3GcH2CCKJbr2ZlYzEYyWBDcHn\na4AySfYvHbStCd6nbE+XQyY2M9tfzdUFZgUzWOaa2Q9mNje9JxQRkaMnM1f3d/c/iCy7eGrQdBaw\nEBgFXBm0XQmMDN6PArqbWS4zq0BkksjMYNhyq5k1CS6BXZFknyOW2lDkTCKZt1N6Dy4iIqF3M/Ce\nmeUEfgeuIlI0DTezXsAKoBuAuy8ws+FEkl88cKO7719U/wbgbSAPkUkj6Zo4AqknNgsCWZreg4uI\nSObJipVH3H0ecLDrcGcdov8AYMBB2mcDUZnPkVpiK25mdxzqQ3d/NhoBiIhI9GhFrdQTWyyQn6By\nExERyQ5SS2zr3P3hTItEREQyJo2TPsLusNfYREQk+zD9053qfWwHvfAnIiJyLDtkxebumzMzEBER\nyZjIrMisjiLrpeV5bCIikk0osaXvsTUiIiLHLFVsIiIhkoHnc4aGKjYREQkVVWwiIiGhySMRSmwi\nImGR9sfNhJqGIkVEJFRUsYmIhEhmr+5/LFJiExEJCV1ji9BQpIiIhIoqNhGRENFIpBKbiEiIGDFa\n3V9DkSIiEi6q2EREQsLQUCSoYjvu/Oeaqyl78onUr1MzWftD/f6PhnVr0bh+Hc49px1r164F4IP3\n36Nx/TqJr7w5Y5g/bx47d+7k/E4dqV2zKvVq16Dv/fdmxdcRkaSCJ2hH65VdKbEdZy6/sicjvxh7\nQPvtd/Zh1g8/MmPOPM7pcC6PP/owAD0uuZQZc+YxY8483nh7KOUrVKB2nToA3HbHXcz/eRHTZ/3A\ntKnfM27smEz9LiIiB6OhyONMi5atWLF8+QHtBQsWTHy/c+eOg64QPvzDD7ioW3cA8ubNy+mtzwAg\nZ86c1KlbjzWrVx+doEUkzXSDthKbJNHv/x7gvXffoVChQowd/80Bn4/46EM++njkAe1///03o7/8\nnJtuvjUzwhQRSZWGIiXRQ48MYMmyVXTvcSmvvfJSss9mzphB3jx5qVEz+bW5+Ph4rrysBzfceAsV\nKlbMzHBFJIX9k0ei9cqulNjkABf3uJTPPv04WdtHw4fRrXuPA/reeF1vKp1SmZtvvS2zwhORVMSY\nRe2VXSmxCQBLFi9OfP/FqJFUObVq4nZCQgIfjxieeH1tv/4P9mXL1i08/ezzmRaniMjhKLEdZ664\nrAetWzblt19/pVL50rz95hsA9H3gXurXqUnDurWYOOErnn72hcR9pkyeROnSZZINNa5evZr/Pj6A\nRb8spGnDejSuX4e33hic6d9HRJLTUKQmjxx33nn3g4O2Dxv+8UHbAVqd3ppJ309P1la6dGl27fWo\nxiYiGWOoWgH9HYiISMioYhMRCQvjoPegHm+U2EREQkRpTUORx6RTTylPgzqnJa7POG3q1FT7Fzsh\nf4bPee3VPalYrhS7d+
8GYOPGjZx6SvkMHzelUSM/45eFCxO3H+7/IF9PnBD180j0vNbvUlZMfJzZ\nH92frL1rm7rMGfEAO+YMpF71sgfsV6ZEYf78/hluu/ysxLYccbG81LcHP372IPM+6UuXsyLLszWv\nV4mp79/DtlkvcH6bOkf3C0noKbEdo8ZO+CZxjcamzZplyjljY2MZ8tabR/Ucn4/8jF9++TexPdj/\nYc48q81RPadkzNDPp9P5xpcPaF+wdC3d73ydKXOXHnS//97Zla++X5Cs7Z5r2vPn5m3U6vIwdS8Y\nwOQ5kdtMVq37i979hvLh2NnR/wLHEUP3sYESW7axfft2zml3Fk0b1qNBndP4fNSBS1utW7eONme0\nonH9OtSvU5MpUyYDMGH8V5zeoilNG9bjku4XsX379oOe46abb+PFgc8RHx9/wGfPPvMUzZs0pGHd\nWjzyUL/E9scHPEKtGqdy5uktuOKyHjz37NMAvDn4dZo3aUijerXp3u0Cdu7cybSpU/nyi1Hcf28f\nGtevw+9Ll3Lt1T355OMRfDVuLJd0vyjxuJO++5aunc89ovjl6Ph+7lI2b9l5QPuvy9azeMWGg+5z\nXutaLF+ziYVL/0jWfmXnpjz15lcAuDub/t4BwMp1m/l58VoSEjTTNqMsiq/sSontGHV2mzNoXL8O\nLZs1BiB37tx8OOJTps2ay9gJ33Dv3XfinvwfgQ+HvU/bdu2ZMWceM+fMp3btOmzcuJEnHnuU0eMm\nMG3WXOrVb8DA55896DnLlC1Ls2YteP/docnaJ4z/iqWLFzNl2kxmzJnHD3PnMGXyJGbPmsVnn3zM\nzDnzGfnFGObO+fe37c7nd+X76bOYOXc+VatW4+0336Bps2Z0PLcTjz3xFDPmzKNipUqJ/c88qw2z\nZs5gx47IP3Qjhn/IRd26H1H8cmzIlycnd17VlgH/G52svVD+PAD0u/Fcpr5/D+89eTUnFimQFSFK\nyGnyyDFq7IRvKFasWOK2u/Ng3/v5fvIkYmJiWLtmDevXr6dEiRKJfRo0aMh/rr2avXv3cl6nLtSu\nU4fJk75j0S8LObNVcwD27N1D48ZND3nePvfcx0UXdObsDh0T2yaM/4oJE76iSYO6AGzfsZ0lixez\nbds2zu3Umdy5c5M7d246dDwvcZ+FC36OrEzy999s37Gdtm3bp/p94+LiaNfubL784nO6XnAhY8Z8\nyYAnnjzi+CXr9b2uIy+++zU7du1J1h4XF0PpEoWZPv937nnmE2657Ewev/18ev3fO1kUaThl4xHE\nqFFiyyaGvf8eGzf+ydSZc8iRIwennlKe3f/8k6xPi5atGP/1JMaO/pLevXpyy213cELhwpzZpu0h\nb8xO6ZTKlalVuw4ffzQ8sc3d6XP3fVzT+z/J+r74wqGX0rq2V0+Gj/iMWrVrM3TI20z67tvDnvui\ni7vz6isvUaRIEerVb0CBAgVw9yOKX7Jew5rlOL9NHQbc1oVCBfKQkOD8s2cvr304iR27dvPZxPkA\nfDJ+Lld20S8p0WWa7o+GIrONLVu2ULz4ieTIkYPvvv2GlStWHNBnxYoVnHTSSVx9zbX0vPoafvhh\nLo0aN2Ha1O9ZumQJADt27GDxb7+leq577n2A5597OnG7bbv2DHn7zcRrW2vWrGHDhg00bdac0V98\nzj///MP27dsZM/qLxH22b9tGiZIl2bt3L8M+eC+xPX+BAmzftu2g523Z6nTm/TCXN994PXFdyvTE\nL1mrTa/nqdqxH1U79uOl977lqTe+4rUPJwEwetLPtGpQGYDWjU5l0e/rsjJUCSkltmyi+yWXMnfO\nbBrUOY33hr7DqVWrHtBn8nff0qh+bZo0qMuIjz7kpptvpXjx4rz+xttccVkPGtatResWTfn110Wp\nnqt6jRrUqVsvcbtN23Zc3P0SWrdoSoM6p3HJxReyfds2GjRsSMfzOtGwXi06n3sONWqe
RqGChQB4\nsP8jtGremDNaNU+2oPJF3brz3LNP0aRBXX5fmnw2XWxsLOd0OJevxo6hQ8fIxJH0xC/RNeTxnnw7\n5E6qlDuJJWMfSayyOp1RiyVjH6FxrfJ8MvA6Rr1842GP1feFz+h7XQdmfngfl3RsxL3PfgpA/epl\nWTL2Ebq2rcuLD/RgzogHjup3Cqv9S2pF65VdWcoJCMeb+vUb+PczNMU4vbZv307+/PnZuXMnbc9o\nxUuvDqJuvXqH3zFECje8KatDkGzun3kvz3H3Bhk9TqXqtf2x90YfvmMada9XOipxZTZdY5MMufH6\n3ixauJB/dv/DZZdfedwlNZFjja6xKbFJBg0Z+n5WhyAiSSitKbFlSy2bNWbP7t1s/msz/+zaxckn\nlwJg+MefUa58+aifr/+DfSlatNgBT8nu/2Bf3hnyFsWLFU9sm/DtZAoU0L1Jx6JJ79xFzpxxFCmY\nl9y5c7B2wxYAut0+iJXrNkftPBXLFGP28Pv5bcUGcuaI5btZi7n9ieGH3zGFUS/fyCV9BpMjLpYL\n2tVj8IgpAJQ+6QQev/18Lr/3rajFLOGixJYNTZ46A4ChQ95mzpzZPD/wpSyL5fY7+hyQ8JKKj48n\nLi7ukNuH4u64OzEx2fkS9rGl1RWRma6XndeY+tXLcvt/Pzpov5gYy/AKIL+t2ECT7k8QFxfDV6/f\nSsfTT+PL7346omN0CpbxqlimGNdc2CIxsa1e/7eS2qFodX8ge098kRTeeH0Q9959V+L2oNde5b57\n+rB0yRLq1a7B5Zd2p85p1bi0Rzd27doFwOxZs2h75uk0a1Sfzueew/r16zMcx1tvDOaiC7rQvs0Z\nnNehPV9PnEC7s1rTtfO5NKh7GgDPPP0k9evUpH6dmrzy0osALF2yhLq1qtPz8kupV7sG69ZpKnhm\niI2NYd2kJ3nqrguY+eF9NKxZniVjH0lcKaTRaeX58rXIBJl8eXIy6KHLmDz0LqZ9cA8dWtVM9djx\n8QnM+HE5lcoUx8z4751dmf3R/cwafn/iYscnFy/ExDdvZ/qwe5n90f00qV0BIDGGR2/pTJVyJzJ9\n2L08cksnKpYpxvRh9wIw5b27qVzuxMTzTXzzdmpVKXXEcYaFZkVGqGILkYsu7k6ThnV59LEniIuL\n450hbzH4zSEA/LJwIa/+7w0aN2lCr55XMHjQ/+h93fXcdcetjPh0FMWKFeOD99/j4X7/x8uvDUrz\nOZ979inefedtAIoWK8bocZGV+ufP+4EZs+dRuHBhvp44gblzZjP3x4WULVuWmTNm8OH77zFl2izi\n4+Np2awRrU5vTZ48efh10SIGv/kO9Rtku4lY2doJBfIyZe4S+jx96CepA9zf+xzGT/2F3v3e5YQC\neZg0tA8Tpy9i954D1xcFyJs7J6c3rELfF0ZyQdu6nFrhJBpd/DjFC+dnyrt3M2XOEnp0bMjoST/x\nzNsTiIkx8uTKkewYfQeOpGKZ4jTp/gQQqeD2+3jcHC5oV48nXh9LqRNPoHChvPz42xoG3Nr5iOKU\ncMn0xGZmXYBPgWruvsjMygPN3P394PM6wMnunq45q2a2HGjg7hujE3H2UbBgQVq0aMW4sWOoUKEi\nsbGxVK1WjaVLllC+QgUaN2kCQI9LL+ONwYNodXprflm4gI7tI6vr79u3j1KlSx/ROQ81FNmmTTsK\nFy6cuN24SVPKlo082mTq1Cl06XoBefJEKoLzOnXh+ymTadO2HRUrVVJSywK79+xl5NfzD9vvrKbV\naNe8Bnde1RaA3DnjKFOiCEtWJl8MeX+FlZDgjPpmPl/PWMSz91zE8LFzSEhw1m/axtR5S6lXoyyz\nF6zkpb7dyZUzB59/+yM//bYmzXF/PH4uI56/jideH8uF7evxyfgfjijOMNJQZNZUbD2AKcGf/YDy\nwCXA/ul1dYAGQPRuxjiO9Lz6Gga+8CzlypXniiuv
SmxP+cNuZrg7NU+rxcRvJ0c9jrz58qW6fSj5\n8qatn0TXrt17k23H70sgJibyM5Mr578VlBl0u2MQy1an/nvj/mtsafHdrN9of80LnN2yJoMfuZzn\n3p7AsDFpu7d05bq/2LFrN1UrluDCdvW4tt+7RxRnGCmtZfIwqpnlB1oAvYDuQfMTQEszm2dm9wAP\nAxcH2xebWSMzm2ZmP5jZVDM7NThWrJk9bWY/m9mPZnZzinPlMbMxZnZtJn7FLNeseXOWLV3KJx9/\nxIXdLk5sX75sGbNnzQLgww/ep1mzFlSrXp21a9cwa+ZMAPbs2cPCBQsOetxoat68JaM++5Rdu3ax\nfft2vvh8JM1btDzq55W0W7F2M3WrRSrspA/+nDD1F27ofnridu1T017hfz93CRe1r4+ZcWKRAjSt\nXZG5C1ZStmRh/ti0lTc/+Z6hI6dTu2qZZPtt37GbAnlzHfK4I8bNpc9V7ciZM45Fv/+R4Tgl+8vs\niq0zMNbdfzOzTWZWH7gXuMvdzwUws/VEhhJvCrYLAi3dPd7M2gCPARcAvYlUe3WCz4okOU9+YBjw\njrsfd0uHn3/Bhfy6aBGFChVKbKtarRoDX3iWH+fPo0bN0+h1bW9y5crF+8NGcOftt7Bt61b2Jezj\n1tvupHqNGmk+V9JrbAAjPvv8sPs0bNSIi7r3oEXThgBc2/t6ap52WuJ6kJL1Hn1tNK882IMt23Yx\nZe6//78M+N8YnupzAbOG309MjLF01Z90uz1t12Q/mTCPRrUqMGv4fbjDPc9+wp9/beeKzk245bIz\n2Ru/j+07d9Or75Bk+23YvI0fflnFrOH3M3bKz7z16dQUx/2B/97ZlYdf/TIqcWZ3GonM5CW1zOwL\n4AV3H29mtwBlgS9Inth6kjyxlQEGApUBB3K4e1Uz+xh4zd3HpzjHcmAL8KS7v8dBmFlvIomRMmXL\n1v9t6YELCmdnnTqeTZ977qNlq8hvrEuXLOGSiy9kxpx5WRxZOGlJLcmoaC2pVblGbX922FfRCAmA\nTrVKZMsltTJtKDKoqM4EBgfJpw/QjcMPCT8CfOPuNYHzgNxpON33wNl2iKuo7j7I3Ru4e4OkNxdn\nd5s2baJmtcqcULhwYlITETneZOY1tguBoe5ezt3Lu3sZYBmQACRdqmJbiu1CwP5pUj2TtI8H/mNm\ncZCYOPd7EPgLeDmq3+AYV7RoUX7+ZfEBzy6rdMopqtZEjhNm0XtlV5mZ2HoQmeaf1MdEJpHsM7P5\nZnY78A1Qff/kEeBJ4HEz+4Hk1wQHAyuBH81sPpGZlUndCuQxsyePwncRETkGWVT/l11l2uQRdz/j\nIG0DD9G9YYrtKkne9w32jQfuCF5Jj1k+yeZViIjIcUUrj4iIhEh2HkKMluy8HJiIiMgBVLGJiIRE\nZBFklWxKbCIiYZHNZzNGi4YiRUQkVFSxiYiEiCo2JTYRkVDJzvefRYuGIkVEJFRUsYmIhIQBMSrY\nlNhERMJEQ5EaihQRkZBRxSYiEiKaFanEJiISKhqK1FCkiIiEjBKbiEhI7J8VGa1Xms9rFmtmP5jZ\nF8F2ETMbb2aLgz8LJ+l7n5ktMbNfzax9kvb6ZvZT8NlAs/QPqiqxiYhIRt0K/JJk+15gortXBiYG\n25hZdSIPl64BnA28YmaxwT6vAtcClYPX2ekNRolNRCQ0Mv8J2mZWGugIDE7S3BkYErwfAnRJ0j7M\n3Xe7+zJgCdDIzEoCBd19urs78E6SfY6YJo+IiIRF9Ff3L2Zms5NsD3L3QSn6PA/cDRRI0naSu68L\n3v8BnBS8LwVMT9JvddC2N3ifsj1dlNhERORQNrp7g0N9aGbnAhvcfY6ZtT5YH3d3M/OjFeDBKLGJ\niIRIJk/2bw50MrMOQG6goJm9C6w3s5Luvi4YZtwQ9F8DlEmyf+mgbU3wPmV7uugam4hISERmRVrU\nXofj7ve5e2l3
L09kUsjX7n4ZMAq4Muh2JTAyeD8K6G5mucysApFJIjODYcutZtYkmA15RZJ9jpgq\nNhERibYngOFm1gtYAXQDcPcFZjYcWAjEAze6+75gnxuAt4E8wJjglS5KbCIiIZJV6464+7fAt8H7\nTcBZh+g3ABhwkPbZQM1oxKLEJiISJlpRS9fYREQkXFSxiYiEiBZBVmITEQkVPbZGQ5EiIhIyqthE\nREJEBZsSm4hIuCizaShSRETCRRWbiEhIGJoVCarYREQkZFSxiYiERfSfx5YtKbGJiISI8pqGIkVE\nJGRUsYmIhIlKNiU2EZHwMM2KREORIiISMqrYRERCRLMildhERELD0CU20FCkiIiEjCo2EZEwUcmm\nxCYiEiaaFamhSBERCRlVbCIiIaJZkUpsIiKhorymoUgREQkZVWwiImGhG9kAVWwiIhIyqthEREJE\n0/2V2EREQsPQrEjQUKSIiISMKjYRkRBRwabEJiISLspsGooUEZFwUcUmIhIimhWpxCYiEiqaFamh\nSBERCRlVbCIiIaKCTYlNRCRclNk0FCkiIuGiik1EJCQii/urZFPFJiIioaKKTUQkLEzT/UGJTUQk\nVJTXNBQpIiIhc9xXbHPnztmYJ4etyOo4jnHFgI1ZHYRka/oZSl25qB1JJZsSm7sXz+oYjnVmNtvd\nG2R1HJJ96Wcos5hmRaKhSBERCZnjvmITEQkTzYpUYpO0GZTVAUi2p5+hTGDoEhtoKFLSwN31j5Jk\niH6GJDOpYhMRCROVbEpsIiJholmRGooUEZGQUcUmGWJm1YCSwGR335vV8Uj2YWbm7p7VcYSNZkUq\nsUnGdQfKAPvMbKqSm6TV/qRmZk2A5e7+RxaHFArKaxqKlIx7CFgOXAy0MLMcWRuOHOvMrK6Z5Qze\nVwIGAPFZG5WEiRKbHDGzfwc73D2ByD9M61Byk7TpD3weJLdlwBZgD4CZxZhZbBbGlr0Fj62J1iu7\nUmKTI5L0uoiZtTOz1sAJwKPASiLJrZmSm6RkZjEA7t4Z+AsYDuQnUvHnDT5LAHJmUYgSErrGJkck\nSVK7AzgfWAhcCwx298fM7B6gN7APmJJlgcoxJfiFKCF4X9zdu5vZSGAakZ+Vkma2D8gBrDOz+9x9\nVxaGnI1l41IrSpTY5IiZWRvgDHdvaWaPA42AHmaGu//XzG4HlmRtlHIsSfIL0S1AAzO73t07m9lr\nwFnAk0Asker/VyW19DGy9xBitCixyWEdZFr2KuBmM+sJNAQ6AM8B/c0sh7s/lwVhyjHOzM4HrgTO\ndfcdAO5+nZl9BDwCdHF3TSKRDNM1NklVimtqjc2sMLDM3ZcDlYFX3X0d8CMwH5iXZcHKsa4iMMrd\n15lZjv3XYd39ImA9cHKWRhcSFsVXdqWKTVKVJKldB/QBFgBfmdkw4GdgiJnVA7oS+U18Q5YFK8eM\nQ9x8vQZoaWYF3X1r0K8bsNrde2V6kCGloUglNjmEFJXaiUAtItfSGgBtgV7AS0SmajcGurr70iwK\nV44hKX52ugLbgO3AV8ClwNVm9iuR62kPAOdlVawSTkpscoAU/zDdBJQAarj7JmBcMG27DXA38IK7\nj866aOVYk2KiyCVEnsV2N3ADkRmzNxH5JSk30MPdl2VRqKGkRZB1jU0OIsVv21cCM4HSZvZh8PkY\nYBKRqdn6r0gOYGZ1gc5Aa6A0sAEYDDR29wfc/RLgCnf/KeuiDCldZFNik38lXVHEzOoTGTYa5O6j\ngFOAKmb2AYC7jwQGBFWcHOfM7IRgeSzMrBawC+hBJLm1dfdWwOvAh2Z2GYC7b8+qeCXcNBQpwAHD\njxcC1YisDtHazGa6+/xgksjvZva2u/fcP2Vbjm9mFgdUAc41s5JAMeBSd98ZzKJ9P+i6GXgWmJ41\nkR4fsnGhFTVKbAIkG348m8i1kPZEkttlQCczSwiGjSqYWYWsi1SOJcEvRPHBZJ
D7gabA3e6+M+gS\nB7Q3s1OJTBJp7e6rsijc0MvuazxGi4YiJVGw7uP1wCx33+vuPwIjgXzAJWZWA0AX+wUgqMbODjar\nEFnz8WWgnpmdB+DuLwGfELnHsZOSWriYWRkz+8bMFprZAjO7NWgvYmbjzWxx8GfhJPvcZ2ZLzOxX\nM2ufpL2+mf0UfDYw6aWRI6WK7Th2kHuNlhFZpb+imdV29/nu/n1wI+2ZRG6iFdkvB9DczB4EcPem\nZlaMyEzI88zsbyLLZO0BPti/VqQcXZk8KzL+/9u7/yCryjqO4++PpAbsCo1Mq5G1k0qGmhsLRlKN\n0zCMWBZ/RIOatclgYMOEGeNO2kxNzciMU9M4pthP8MeoNGhRRgzqpETs8mMLfwGL5WgQ4W7jgJpa\nQ5/+eJ616w3YH9z27j33+2LucDj33PM8y+zc73me85zvF7jWdpekRmCbpPVAG/Cw7WWS2oF24DpJ\nk0k1HM8mPZD/kKRJtg8Bt5HyznYCvyZdNK0dSqdixFanyu6pXSJpNukXbTHQA8yVdC6A7d8CN9ru\nrZlHT+AAAAblSURBVFZ/w8gh6RSA/DD+fmAysDPv6wV+ScpAcx1wJ2kGIILacBnGVZG299nuytsv\nATuAiaRFQyvzYSuBOXn7U8C9tl/PMz/PAOfne7Mn2e7I30t3lHxm0CKw1TlJV5OKhX4Y+AlwTX6N\nB9ryFRaRlDYASDoL+Kuk70m6DFhO+rLqkXRrvmB6FlgPXAlMt91dxS6HYSKpGfgAacTVlFPtAfwN\naMrbE0m5Zvvsyfsm5u3y/UMSga3OSHqXpLG2nTOKfIa0gu164AJgITCXVDx0FOn5oxD6vAz8njRl\nPZ80fTQOWAccBG6RdAXp4uig7b3V6mi9qvCAbYKkrSWvqw7bptQArAaW9KVL65NHYOXp1f6v4h5b\nHZHUBFwL/EXSctsvSOolVy+2/aKkJcAM23dJWmr7X9XscxhZbO+RtBmYQlo5Oxe4gjSNvZQ0ld0G\nLLb9WrX6GSqm1/bUox2Q78GvBu62fX/evV/SqTnh9an89wJ5L3Baycffmfftzdvl+4ckRmz1pQfY\nQvoS+kJedfQMcG9+Fgng3aQsI6NIN4ZDAN70AH876Qp8Amnk1go8QQpqe4DP2366Kp0Mbyz5r8Sr\n/7Yk4MfADtvfLXlrDSlrEfnvX5TsnyfpxPzY0JnA5jxteVDS9HzOz5V8ZtBixFYHJJ0JHGd7l6S7\nSYmLZwMLbLdLug14TNLjpITGl+dVSiG8IU9f933d7Qa+Qwpq19j+eb7/tt/2i1XrZN3TcK+KnEEa\nsT8hqa9k1deAZcAqSfOB50i3PLD9lKRVwNOkC+cvlXzXXA2sAEaTVkMOaUUkgP63skQoEkknk0Zq\nvaRFIodISWkvI6XJ2mf7dkkfJCWlfT6eUwv9yQ9cPwp83/a3qt2fkLRMmepHNnRW7HwnN7xlW39T\nkSNRjNgKzvbfJc0EHiJNPZ8H3EdaBPBP4Nx8Ff5T269Xr6ehluTRfzvQLGlMSaaRUEUiMo9ABLa6\nYPuR/IT/zaTA1kR64HoeqXzIe4F7gAhsYTA6SAVmQxhRIrDVCdvrJX2VVPV6uu2VktaQskeMsX2g\nuj0Mtcb2TknzYrQWRpoIbHXE9oOS/g10SPpQlJwJxyqC2sgTU5ER2OqO7bWSTiDlaGuNVEchFEtU\n0I7n2OpSLhL6kQhqIYQiihFbnYrqxSEUUNRjAyKwhRBCYQwwKX/hxVRkCCGEQokRWwghFEkM2WLE\nFmqPpEOS/ijpSUk/kzTmGM51oaRf5e1P5mwaRzp2fK5fN9g2vpGfIRzQ/rJjVkj69CDaapb05GD7\nGEKRRGALtehV2y22zyGlBVtY+qaSQf9u215je9lRDhlPStQawoilCv6pVRHYQq3bAJyRRyq7JN1B\nyq5ymqRZkjZJ6sojuwYASRdJ2impi5KUUJ
LaJN2St5skPSBpe35dQMpYfnoeLd6Uj1sqaYukxyV9\ns+Rc10vqlvQ7Usqyo5K0IJ9nu6TVZaPQmbnIY7ekT+TjR0m6qaTtLx7rf2QohuEsWzNSRWALNSvX\nkJtNqgUGqbbTrbbPBl4BbgBm2p4CbAW+IumtwA+BS0glV045wulvBh61fR6pqOZTpDpkf8qjxaWS\nZuU2zwdagFZJH5XUSsrD2QJcDEwbwI9zv+1pub0dpOrUfZpzGx8HluefYT5wwPa0fP4Fub5VCHUv\nFo+EWjS6pPbTBlKhw3cAz9nuyPunA5OBjbmE2AnAJuAs4FnbuwEk3QUcrtz9x0jFDsn1og5IelvZ\nMbPy6w/53w2kQNcIPNCXbirn5OzPOZK+TZrubADWlby3Kj9Mv1vSn/PPMAt4f8n9t3G57e4BtBUK\nrIYHWhUTgS3Uoldtt5TuyMHrldJdwHrbl5Yd96bPHSMBN9q+vayNJUM41wpgju3tktqAC0veKy+a\n6Nz2YtulARBJzUNoOxRJRLaYigyF1QHMkHQGgKSxkiYBO0k1xE7Px116hM8/DCzKnx0laRzwEmk0\n1mcdcGXJvbuJkt4OPAbMkTRaUiNp2rM/jcA+SccDl5e9N1fScbnP7wF25bYX5eORNEnS2AG0E0Lh\nxYgtFJLtnjzyuUfSiXn3Dba7JV0FPCjpH6SpzMbDnOLLwA9yaftDwCLbmyRtzMvp1+b7bO8DNuUR\n48vAZ213SboP2A68AGwZQJe/DnSSqp13lvXpeWAzcBKw0PZrkn5EuvfWlQvF9gBzBva/E4qsllcz\nVors8lmOEEIItai1dao3dm6t2PlGH69ttqdW7ITDJAJbCCEUhKTfABMqeMpe2xdV8HzDIgJbCCGE\nQonFIyGEEAolAlsIIYRCicAWQgihUCKwhRBCKJQIbCGEEAolAlsIIYRCicAWQgihUCKwhRBCKJQI\nbCGEEArlP2JWG9weMDAzAAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<matplotlib.figure.Figure at 0x7fb0e79cada0>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "plot(actual_value = df.loc[:,'Actual'].values.astype(int),\n",
    "     pred_value = df.loc[:,'Prediction'].values.astype(int))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-24T00:01:49.538827Z",
     "start_time": "2017-07-24T00:01:49.532672Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "Actual\n",
       "0.0    2152\n",
       "1.0    9698\n",
       "Name: Actual, dtype: int64"
      ]
     },
     "execution_count": 31,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "df_.groupby(by=\"Actual\").Actual.count()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 32,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-24T00:01:49.827300Z",
     "start_time": "2017-07-24T00:01:49.540182Z"
    }
   },
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAbQAAAGhCAYAAAAJL0FuAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3XmcTuUbx/HPNWPfQoSyJiXJvktlHxFCaNWe0p4K7Ur7\n9msVbaJCVGRf25SdFi2WUpIIlSVkuX5/PMf02GYGz8yY83zfr9fzmnPuc59zX0+/+c3lvs997mPu\njoiISFaXkNkBiIiIxIISmoiIhIISmoiIhIISmoiIhIISmoiIhIISmoiIhIISmoiIhIISmoiIhIIS\nmoiIhEK2zA5ARETST2KBMu47tsTkWr7lj4nunhSTi6UDJTQRkRDzHVvIeVLnmFxr68IXisTkQulE\nCU1EJNQMLD7uLsXHtxQRkdBTD01EJMwMMMvsKDKEEpqISNhpyFFERCTrUA9NRCTsNOQoIiJZn2Y5\nioiIZCnqoYmIhJ2GHEVEJMszNOQoIiKSlaiHJiISahY3Q47qoYmIhJ0lxOaTlqbMbjazRWb2jZm9\nY2a5zKywmU02syXBz0JR9Xub2VIz+8HMWkaV1zSzr4Njz5qlnpWV0EREJCbM7DjgBqCWu1cGEoGu\nQC9gqrtXAKYG+5hZpeD4KUAS8KKZJQaXewm4EqgQfFJ9bY0SmohI2JnF5pM22YDcZpYNyAP8BrQD\nBgXHBwHtg+12wFB33+buPwFLgTpmVgIo4O4z3d2BN6POSbFhEREJrZg+WF3EzOZG7Q9w9wG7d9x9\npZk9AfwCbAEmufskMyvm7quCar8DxYLt44CZUdf7NSjbHmzvXZ4iJTQREUmrte5e60AHg3tj7YBy\nwF/Au2Z2YXQdd3cz8/QITglNRCTMMvb1Mc2An9z9DwAzew9oAKw2sxLuvioYTlwT1F8JlIo6v2RQ\ntjLY3rs8RbqHJiIisfILUM/M8gSzEpsC3wGjgW5BnW7AqGB7NNDVzHKaWTkikz9mB8OTG8ysXnCd\ni6POOSD10EREwi6DVgpx91lmNgKYD+wAFgADgHzAcDO7HPgZ6BzUX2Rmw4Fvg/o93H1ncLlrgTeA\n3MD44JMii0wgERGRMErIf5znrHF1TK619ZN756V0Dy2zachRRERCQUOOIiJhlxAfS18poYmIhJlW\n2xcREcla1EMTEQm7OFltXwlNRCTUYrr01REtPr6liIiEnnpoIiJhpyFHEREJBQ05ioiIZB3qoYmI\nhNnBvZwzS1MPTUREQkE9NBGRsNM9NJGsz8xym9mHZva3mb17GNe5wMwmxTK2zGJmjczsh8yOQzLQ\n7mHHw/0c4ZTQ5IhgZueb2Vwz22Rmq8xsvJmdFoNLdwKKAUe7+7mHehF3f8vdW8QgnnRlZm5mJ6RU\nx90/dfeTMiomkYyiIUfJdGZ2C9AL6A5MBP4FWgJtgc8O8/JlgMXuvuMwrxMKZpZN/y3ijVYKEckQ\nZnYU0JfIm2rfc/fN7r7d3ce4++1BnZxm9oyZ/RZ8njGznMGxM83sVzO71czWBL27S4Nj9wP3AF2C\nnt/lZnafmQ2Jar9s0KvJFuxfYmY/mtlGM/vJzC6IKv8s6rwGZjYnGMqcY2YNoo59ZGYPmNmM4DqT\nzKzIAb7/7vhvj4q/vZmdZWaLzWy9mfWJql/HzL4ws7+Cus+bWY7g2CdBtS+D79sl6vp3mNnvwOu7\ny4Jzygdt1Aj2jzWzP8zszMP6H1aOLBpyFMkQ9YFcwPsp1LkTqAdUA6oCdYC7oo4XB44CjgMuB14w\ns0Lufi/wEDDM3fO5+6spBWJmeYFngVbunh9oACzcT73CwNig7tHAU8BYMzs6qtr5wKXAMUAOoGcK\nTRcn8t/gOCIJeCBwIVATaATcbWblgro7gZuBIkT+2zUl8qp63P30oE7V4PsOi7p+YSK91auiG3b3\nZcAdwBAzywO8Dgxy949SiFfkiKSEJp
ntaGBtKsNgFwB93X2Nu/8B3A9cFHV8e3B8u7uPAzYBh3qP\naBdQ2cxyu/sqd1+0nzqtgSXuPtjdd7j7O8D3wNlRdV5398XuvgUYTiQZH8h2oJ+7bweGEklW/3P3\njUH73xJJ5Lj7PHefGbS7HHgZOCMN3+led98WxLMHdx8ILAVmASWI/ANCwmL3+9Bi8TnCHfkRStit\nA4rsHvI7gGOBn6P2fw7Kkq+xV0L8B8h3sIG4+2agC5F7eavMbKyZVUxDPLtjOi5q//eDiGedu+8M\ntncnnNVRx7fsPt/MTjSzMWb2u5ltINID3e9wZpQ/3H1rKnUGApWB59x9Wyp1JUsxJTSRDPIFsA1o\nn0Kd34gMl+1WOig7FJuBPFH7xaMPuvtEd29OpKfyPZE/9KnFszumlYcY08F4iUhcFdy9ANCHyL/B\nU+IpHTSzfMAzwKvAfcGQqkiWo4Qmmcrd/yZy3+iFYDJEHjPLbmatzOyxoNo7wF1mVjSYXHEPMORA\n10zFQuB0MysdTEjpvfuAmRUzs3bBvbRtRIYud+3nGuOAE4NHDbKZWRegEjDmEGM6GPmBDcCmoPd4\nzV7HVwPHH+Q1/wfMdfcriNwb7H/YUcqRRZNCRDKGuz8J3EJkoscfwArgOuCDoMqDwFzgK+BrYH5Q\ndihtTQaGBdeax55JKCGI4zdgPZF7U3snDNx9HdAGuJXIkOntQBt3X3soMR2knkQmnGwk0nscttfx\n+4BBwSzIzqldzMzaAUn89z1vAWrsnt0pIREnQ47mnuJohIiIZGEJBct4zjP6pF4xDbaO7j7P3WvF\n5GLpQA9Wi4iEXRYYLowFJTQRkTAzrRQiIiKSpaiHJiISdhpyjA9FihTxMmXKZnYYEgJbt+9vhr/I\nwfv26wVr3b1oZseR1cR9QitTpiwzZs3N7DAkBBav2pjZIUhIVC1dYO+VaA6LqYcmIiJZnRE/CU2T\nQkREJBTUQxMRCTMj9dU+Q0IJTUQk1ExDjiIiIlmJemgiIiEXLz00JTQRkZCLl4SmIUcREQkF9dBE\nREIuXnpoSmgiImEWR9P2NeQoIiIxYWYnmdnCqM8GM7vJzAqb2WQzWxL8LBR1Tm8zW2pmP5hZy6jy\nmmb2dXDsWUtDN1MJTUQkxCx4Di0Wn9S4+w/uXs3dqwE1gX+A94FewFR3rwBMDfYxs0pAV+AUIAl4\n0cwSg8u9BFwJVAg+Sam1r4QmIhJyGZXQ9tIUWObuPwPtgEFB+SCgfbDdDhjq7tvc/SdgKVDHzEoA\nBdx9prs78GbUOQeke2giIpJWRcws+vUkA9x9wAHqdgXeCbaLufuqYPt3oFiwfRwwM+qcX4Oy7cH2\n3uUpUkITEQm5GM5yXOvutdLQXg6gLdB772Pu7mbmsQoomoYcRUQk1loB8919dbC/OhhGJPi5Jihf\nCZSKOq9kULYy2N67PEVKaCIiIZcJ99DO47/hRoDRQLdguxswKqq8q5nlNLNyRCZ/zA6GJzeYWb1g\nduPFUecckIYcRUTCLIOfQzOzvEBz4Oqo4keA4WZ2OfAz0BnA3ReZ2XDgW2AH0MPddwbnXAu8AeQG\nxgefFCmhiYhIzLj7ZuDovcrWEZn1uL/6/YB++ymfC1Q+mLaV0EREQk5LX4mISJZnesGniIhI1qIe\nmohIyMVLD00JTUQk7OIjn2nIUUREwkE9NBGRMDMNOYqISEjES0LTkKOIiISCemgiIiEXLz00JTQR\nkRDTg9UiIiJZjHpoIiJhFx8dNPXQREQkHNRDExEJMz2HJiIiYREvCU1DjiIiEgrqoYmIhFy89NCU\n0EREwi4+8pmGHEVEJBzUQxMRCTkNOYqISJZnpqWvRERSdE/Pazmz+vF0aFZ3j/LvF33Fhe2a0Dmp\nIee1PoOvF87d4/iqlSuoV7EEg15+dp9r3nBZl32uJ5JWSmgickjanXsBL7353j7lTz90N91v6sXw\nCT
O49tY+PPPQPXscf6JvH047s/k+500ZP5o8efOmW7zxbHcv7XA/RzolNBE5JDXrNqRAwUL7lJsZ\nmzZuBGDTxg0ULVY8+di0iWM4rnQZyp9YcY9z/tm8icEDn+fK629P36DjVLwkNN1DE5GYuv3eR7nm\nonN4qt9d7Nq1izffnwxEktbrLz3Ny2+N2me48YUnHuTiq64nV+7cmRGyhIR6aCISU8MHv8Jt9zzM\npFnfcds9D3PfbdcB8NLTD3Ph5T3IkzffHvW/X/QVK37+iaZJZ2dGuPHBYvQ5wqmHJiIx9eHId7jj\n/scAaNHmHO6/43oAvl4wlynjRvHMw/ewccPfmBk5cuYkMTGRb79aQKsGldmxYwfr1/3B5Z3P4tXh\n4zLza0gWpIQmIjFVtFhx5s78jNr1GzF7xseULlsegDdGTkyu89JTD5Enbz7Ou+RqADpfdAUAK1f8\nzPWXdlYyi7GscP8rFpTQROSQ3HHdpcz94jP++nMdzetU5Jpb+tCh68Xc88hzPHbfHezcuYMcOXNy\nzyP/y+xQ41scvT7G3D2zY8hUNWvW8hmz5qZeUSQVi1dtzOwQJCSqli4wz91rxeJaOYtX8JIX7PvM\n36H48amzYhZXelAPTUQkxAyIkw6aEpqISLhljWfIYkHT9kVEJBSU0OLY1q1bOa1+HerUqEqNqqfw\nwP33Jh9bv349rZOaU/nkCrROas6ff/4JwPbt27ni0m7UqnYq1U49mccffTizwpdD0KpBZTo2r0fn\npIZ0TmrIwrmzUqxfr2KJw27z7lu606rhqXROakiXsxrx5byU29yfjyaN49UXngIiq40sW/x98rEX\nnnyQmZ9OP+w4w8wsNp8jnRJaHMuZMycTJk9j9vwvmTV3IZMmTmDWzJkAPPHYI5zZpCnffLeEM5s0\n5YnHHgFg5Ih32fbvNuYu/JrPZ83jlYEv8/Py5Zn4LeRgvTJsLMMnzGD4hBlUq5UxCwHf0ucBhk+Y\nwY297uOB3jcd9PlntjiLy3vcAsD0iWP4ccl/Ca3HrXdRr1HjmMUaRvGy9JUSWhwzM/Lli6zasH37\ndnZs3578Szvmw1FceFE3AC68qBsfjv4g+Zx/Nm9mx44dbNmyhRw5cpC/QIHM+QISE/9s3sSVXc+m\ny1mN6Ni8HtMnjd2nzh+rf+fSTkl0TmpIh2Z1mT/rcwA+/2QqF7VvSpezGtGz+8X8s3lTim3VrNOQ\nFct/BP5blb9Ti/rcdOX5bPgrMgrw1msvcU6T2nRqUZ/be1wCwKh33+Khu29l4dxZfDR5HE89dDed\nkyLXuvuW7kwe+wEzPppMz+4XJ7c154tPue6Scw8pTsmalNDi3M6dO6lbsxqljz2GJs2aU6du5F/s\na1avpkSJyHBT8eLFWbN6NQAdOnYiT968lCtVghOPL81NN/ekcOHCmRa/HLwrurSmc1JDLmgb6dXk\nyJmLpwe+xbBxn/LKsLE8+UAf9n6cZ9yod2lwRlOGT5jBuxM/56RTTuXP9esY+OzjvPz2aIaN+5RK\nVarz5sDnU2z74ynjOaFiJQDuuvlqburdlxGTvqBCxUr0fyYyCvD6i08zbPxnjJj0BXc99Mwe51er\nVZczm5+V3OMrVfb45GN1T2vM1wvn8s8/mwGY+OF7JLXteEhxhkqMhhuzQAdNsxzjXWJiIrPmLeSv\nv/6iS6dzWPTNN5xSufIedaKHG+bMnk1iQiI//vIbf/75J80aN6JJ02aUO/74/V1ejkCvDBtLocJH\nJ++7O88+dj/zZ31OQkICa35fxbo/1lDkmGLJdSpXrcG9Pa9lx/btNG7ZhoqnVGHulPH8uOR7LunQ\nAoDt//5LlZp19tvmUw/dzcDnHqdQ4SLc9/gLbNzwNxs3/E2teqcB0Lbj+fS8NjIiUOHkU+h9wxU0\nbtmaJi3bpPl7ZcuWjYZnNOPjKeNpflZ7Pp02kZv79GXuzM/SHGcY
GZCQkAWyUQwooQkABQsW5Iwz\nGzNp0gROqVyZY4oVY9WqVZQoUYJVq1ZR9JhjABg+9G1atEwie/bsHHPMMdSv35B58+YqoWVh4z4Y\nzp/r1vHO2E/Inj07rRpUZtu2rXvUqVm3Ia+9O4FPp03knluv4aIrepD/qELUa9SYR59/PdU2bunz\nAM1bt0/e37jh7wPWff6NEcybNYOPp4znleefYMSkmWn+Li3bdmTooAEcVbAQlapUJ2++/LiT5jgl\na9OQYxz7448/+OuvvwDYsmULU6dM5qSTIu+pat2mLUMGDwJgyOBBtDm7HQAlS5fmo+nTANi8eTOz\nZ89MPkeypk0b/qZwkSJkz56d2Z9/wm+//rJPnd9+/YWjix5Dx/Mv4ZyuF/PdN19SpUZtFs6dxS/L\nlwHwzz+bWf7jkjS1mb/AURQ4qmDyvbgx7w2lVt2G7Nq1i99/+5U6DU7npt592bRhwz73u/Lky8fm\nA9wDq1XvNL7/5kvee3sQSWd3BDisOMMiI4cczaygmY0ws+/N7Dszq29mhc1sspktCX4Wiqrf28yW\nmtkPZtYyqrymmX0dHHvW0jArRT20OPb7qlVceVk3du7cyS7fRcdOnTmrdWSIp+ftvbjwvM4Mev1V\nSpcuw5B3hgPQ/ZoeXHXFpdSoegruzkXdLuXUKlUy82vIYTrrnC7ccFlnOjavR6Uq1Sl3won71Jn7\nxae88fKzZMuenTx58vLg0y9T+Ogi9H3yJXpddxn//vsvANf1vJuyx1dIU7sPPNWfB/vcxNYtWyhZ\nuix9n3iRnTt30ufGK9m0cQPuznmXdqfAUQX3OC/p7E70veN63n69P0++9OYexxITE2nUtCWj332b\nB57uD3DYcYZBBs9Q/B8wwd07mVkOIA/QB5jq7o+YWS+gF3CHmVUCugKnAMcCU8zsRHffCbwEXAnM\nAsYBScD4lBrWWo5ay1FiRGs5SqzEci3H3CVO9PKXvxCLS7GoX4sU4zKzo4CFwPEelVzM7AfgTHdf\nZWYlgI/c/SQz6w3g7g8H9SYC9wHLgenuXjEoPy84/+qU4tOQo4hImGXsLMdywB/A62a2wMxeMbO8\nQDF3XxXU+R3YPePoOGBF1Pm/BmXHBdt7l6dICU1ERNKqiJnNjfpctdfxbEAN4CV3rw5sJjK8mCzo\nuaXL0KAS2hGuUYO61K1ZjQrHl6ZUiaLUrVmNujWrxXx1jmVLl5I7uzGg/0vJZddf25133hoS03bW\nr1/PwJf7J++vWLGCC8/vEtM2JO0uaNuYzkkNaVmvEmdWK5e8JNbKFT+nS3vPP96XIa9Ehr9633gF\n0yaO2adO7xuvSF4qq3NSQy7p2HKfOpJ2kdX2Y7ZSyFp3rxX1GbBXc78Cv7r77vXNRhBJcKuDoUaC\nn2uC4yuBUlHnlwzKVgbbe5enSJNCjnCffh75vRg86A3mzZvLM8/u/4HQnTt3kpiYeFhtFStWjOee\nfZrLrriSbNnS51fjz/XreWVAf668ujsApUqVYsjbw9KlLUndW6MjayCOevctFn01nz4PPJnJEUXc\nds/DKT6DtmPHjj1+R/feT+t58SHjlq1y99/NbIWZneTuPwBNgW+DTzfgkeDnqOCU0cDbZvYUkUkh\nFYDZ7r7TzDaYWT0ik0IuBp5LrX310LKoHTt2ULxIQXrechO1q1dhzuzZlC9bMnka/qyZMzmrZTMA\nNm3axJWXXcJp9etQr1Z1xo75cL/XLFasOA0bNuLtIYP3ObZ0yRLOPqslDerUpFnj01myeHFyeaMG\ndalV7VTuvftOiheJzEjbsGEDSc2bUL92DWpXr8K4sZF/id91Zy8WL/6BujWrcVefXixbupS6NasB\n0LBuLRb/8ENym03OOI0vFy5Mc/wSOyPeep0nH7wzeX/44Fd4qt9d/LJ8Gec0rcPtPS6hfZNa3HZN\nN7Zu3QLAN1/O47JzW9H1rNO5
9uIOrPtjzYEuf0ief7wvd950Fd3Oac7dt3bnvXcGcdMV53F5l9Zc\nc+E57Nq1i8f79qZDs7p0bF6PyWMjy7XN/HQ6l3c+i+suOZdOLerFNCbZr+uBt8zsK6Aa8BCRRNbc\nzJYAzYJ93H0RMJxIwpsA9AhmOAJcC7wCLAWWkcoMR1BCy9L+/vtvTmt0OnMWfEW9+vUPWO+hB/vS\nvGUSn30xm/GTp9Hr9lvZunXrfuv2vL0XTz/1OLt27dqjvMc1V/G/517k89nz6Pvgw9x843UA3HLT\n9dx0S0/mLvya4sX/W5k9d+7cDB/5AV/Mmc/YiVO4vefNADzY7xFOPPEkZs1byIMPPbJHGx07d2Hk\niMjjAb/++it//rmeqtWqHVT8EhtJbTsybcKH7NixA4APhg+hfeeLAPhxyfdcePm1fDBtLjly5mTE\nkNf4d9s2HrvvDp58eQhDx31C63O68MITDx5y+4/37Z085HjnTf/dpvlp2WIGvPMhD//vFSCyHuTT\nLw9h4NAPmTTmfX5a+gPvTvycl98axeN9e7Nu7R8ALPpqAXf2e4oPpsXnjOaMfA7N3RcGw5FV3L29\nu//p7uvcvam7V3D3Zu6+Pqp+P3cv7+4nufv4qPK57l45OHZd9KzJA0m3vreZOfCUu98a7PcE8rn7\nfenV5n5ieAMY4+4jMqrNjJQjRw7atT8n1XpTJ09i0oTxPBmsmL9161ZW/PILFU7c93mjEypUoErV\narw7/L9hwL/++ovZs2ZyXueOyWU7dkb+0M2ZPYsPPhwHQJfzzuf+e+8CIssp3d2nF5/P+IyEhAR+\nXbGCtWvXphhnx06d6dT+bHrfeTcj3h1Gh47nHnT8Ehv58hegRt2GfDZ9MiVLlyUxMZHjK5zEL8uX\ncVypslSpEVk6qnWHLox86w1q1W/EssXfc/X5bYHIEHixEscecvsHGnJs3KI1OXPlSt6v36gJBQpG\nntFdMPcLWrXtRGJiIkWOKUb12vX49qv5ZM+eg6o16lDiuFL7XC9eZIWV8mMhPQeTtwEdzOxhd0/5\nL9l+mFk2d9+RDnGFRu7cuff4Rc2WLVtyzyp66SJ3Z/jIDzi+fPk0XfeOXndyyUXnU6duveTzjy5S\nhFnzFqY5trcGv8nff//NF3Pmky1bNsqXLZlqr6pMmTLkzZeP7779lhHDhzHw1TcOKX6JjQ5duzH4\nlec5tmRp2p17YXL53n8bzQx3p0LFU3hj5MR0jSl37jx77ufJc4Cae52XxnqStaXnkOMOYABw894H\nzKysmU0zs6/MbKqZlQ7K3zCz/mY2C3jMzO4zs0Fm9qmZ/WxmHczssWA5lAlmlj047x4zm2Nm35jZ\ngLQskRJGZcqUZcH8eQC8/97I5PJmLVry4gv/3U9duGBBitepdMoplCtfnokTI73/QoUKUbx4CUZ9\n8D4Au3bt4qsvvwSgVu06yeXvDhuafI2/N/xN0WOOIVu2bEydMpnfVkYmKOXLn5+Nmw78AHKnc7vw\n+KMP8++2bZxcqdIhxS+xUb12PX79+Scmj/2Almd3SC5fueJnvvky8ns2/oN3qV67HuUrVGTN76v4\nemFkSG/7v/+y9IfvMjTeGrUbMOHDkezatYt1f6xh4dxZVKpSI0NjOCLF0Wr76X0P7QXgguDp8WjP\nAYPcvQrwFvBs1LGSQAN3vyXYLw80AdoCQ4g8PX4qsAVoHdR53t1ru3tlIDeQ9iW6Q+Sue+7jxuuv\npWG92uTIkSO5/M677+WfzZupVe1UalQ9hX4P3JfqtXr1votfV/z3vOPgt4byyoD+yW+3Hj8uMsnj\nyaef5cnHH6V29SosX/4TBY6K/E99/gUXMfOLz6lV7VTeHTaUEypElhkqVqwY1WvUpFa1U7mrT699\n2u3Q6VyGDX2bjud2Pqz4JTaandWOGnUbkL/Af/8XLnfCSQwe+Dztm9Ri65YtdLzgUnLkzMmT/d
/k\nib596NSiPp1bnZac3A5F9D20zkkN2blzZ6rnNG/dnrLlT6RTi/pcdX5bet79EEcXKXrIMYRFjKft\nH9HSbekrM9vk7vnMrC+wnUgCyufu95nZWqCEu28Pelmr3L1IcM9rursPCq5xH7Dd3fuZWUJwjVzu\n7sF117v7M2bWEbidyJphhYHngjXD3mA/99CChwGvAihVunTNxcvS55mbeLB582by5MmDmfHOW0MY\nNep9hg4fmfqJIRTGpa+uuegcLu9xa/JrXn5Zvoye3S9m+IQZmRxZuMVy6au8x53kFbv3T71iGsy/\np0nM4koPGfFAxjPAfCCt727YvNf+NgB332Vm26NmuuwCsplZLuBFoJa7rwiSYC5SEDwMOAAiazmm\nMS7Zj3lz53DbLTexa9cuChYqxIBX9IqOMPjrz3Vc1K4plapUT05mknVlgc5VTKR7QnP39WY2HLgc\neC0o/pzICsuDgQuATw+jid3Ja62Z5QM6EXk6XTLA6WeceVCTRSRrKFjoaD78ZN//XUuXLa/eWRaU\nFYYLYyGjnkN7EigStX89cGnw4N1FwI2HemF3/wsYCHwDTATmHEacIiKSRaVbD83d80VtryZyf2v3\n/s9EJnrsfc4le+3fl8I174vavgu4K7XriYjEozjpoGktRxGRUDMNOYqIiGQp6qGJiIRY5Dm0zI4i\nY6iHJiIioaAemohIqGWNVT5iQQlNRCTk4iSfachRRETCQT00EZGQ05CjiIhkfVnk1S+xoCFHEREJ\nBfXQRERCbPf70OKBEpqISMjFS0LTkKOIiISCemgiIiEXJx00JTQRkbDTkKOIiEgWoh6aiEiY6Tk0\nERGRrEU9NBGREDOtti8iImERJ/lMQ44iIhIO6qGJiIRcQpx00ZTQRERCLk7ymYYcRUQkHNRDExEJ\nMbP4WSlECU1EJOQS4iOfachRRETCQT00EZGQ05CjiIiEQpzkMw05iohIOKiHJiISYkZkPcd4oIQm\nIhJymuUoIiJykMxsuZl9bWYLzWxuUFbYzCab2ZLgZ6Go+r3NbKmZ/WBmLaPKawbXWWpmz1oaZrYo\noYmIhJlFXh8Ti89BaOzu1dy9VrDfC5jq7hWAqcE+ZlYJ6AqcAiQBL5pZYnDOS8CVQIXgk5Rao0po\nIiKS3toBg4LtQUD7qPKh7r7N3X8ClgJ1zKwEUMDdZ7q7A29GnXNASmgiIiEXWf7q8D9AETObG/W5\naj/NOTDFzOZFHS/m7quC7d+BYsH2ccCKqHN/DcqOC7b3Lk+RJoWIiISYEdPXx6yNGkY8kNPcfaWZ\nHQNMNrOrjGGrAAAgAElEQVTvow+6u5uZxyqgaOqhiYhIzLj7yuDnGuB9oA6wOhhGJPi5Jqi+EigV\ndXrJoGxlsL13eYqU0EREQi6GQ46ptGN5zSz/7m2gBfANMBroFlTrBowKtkcDXc0sp5mVIzL5Y3Yw\nPLnBzOoFsxsvjjrngDTkKCISchm4lmMx4P2gvWzA2+4+wczmAMPN7HLgZ6AzgLsvMrPhwLfADqCH\nu+8MrnUt8AaQGxgffFKkhCYiIjHh7j8CVfdTvg5oeoBz+gH99lM+F6h8MO0roYmIhFhahwvDQAlN\nRCTkYjjL8YimSSEiIhIK6qGJiIRcfPTPUkhoZlYgpRPdfUPswxERkVjTG6thEZElTKL/S+zed6B0\nOsYlIiJyUA6Y0Ny91IGOiYhI1hBZ+iqzo8gYaZoUYmZdzaxPsF3SzGqmb1giIhITmfP6mEyRakIz\ns+eBxsBFQdE/QP/0DEpERORgpWWWYwN3r2FmCwDcfb2Z5UjnuEREJEayQOcqJtIy5LjdzBKITATB\nzI4GdqVrVCIiIgcpLT20F4CRQFEzu5/IopL3p2tUIiISM1nh/lcspJrQ3P1NM5sHNAuKznX3b9I3\nLBERiYV4muWY1pVCEoHtRIYdtVyWiIgccdIyy/FO4B3gWC
JvDX3bzHqnd2AiIhIb8TJtPy09tIuB\n6u7+D4CZ9QMWAA+nZ2AiIhIbR34qio20DB+uYs/Ely0oExEROWKktDjx00Tuma0HFpnZxGC/BTAn\nY8ITEZHDYRY/70NLachx90zGRcDYqPKZ6ReOiIjEWpzksxQXJ341IwMRERE5HKlOCjGz8kA/oBKQ\na3e5u5+YjnGJiEiMZIUZirGQlkkhbwCvE5ko0woYDgxLx5hERCSGzGLzOdKlJaHlcfeJAO6+zN3v\nIpLYREREjhhpeQ5tW7A48TIz6w6sBPKnb1giIhILhmmWY5SbgbzADUTupR0FXJaeQYmIiBystCxO\nPCvY3Mh/L/kUEZGsIIvc/4qFlB6sfp/gHWj74+4d0iUiERGJqXiZ5ZhSD+35DIsiE/27cxe//bkl\ns8OQEKjbVmt2i2SmlB6snpqRgYiISPqIl3d+pfV9aCIikgUZ8TPkGC+JW0REQi7NPTQzy+nu29Iz\nGBERib2E+OigpemN1XXM7GtgSbBf1cyeS/fIREQkJhIsNp8jXVqGHJ8F2gDrANz9S6BxegYlIiJy\nsNIy5Jjg7j/vdVNxZzrFIyIiMRRZWDgLdK9iIC0JbYWZ1QHczBKB64HF6RuWiIjESlYYLoyFtAw5\nXgPcApQGVgP1gjIREZEjRlrWclwDdM2AWEREJB3EyYhjmt5YPZD9rOno7lelS0QiIhIzBnp9TJQp\nUdu5gHOAFekTjoiIyKFJ9R6auw+L+gwCOgA10z80ERGJhYQYfdLKzBLNbIGZjQn2C5vZZDNbEvws\nFFW3t5ktNbMfzKxlVHlNM/s6OPaspWGq5qEsfVUOKHYI54mISHy4Efguar8XMNXdKwBTg33MrBKR\nORqnAEnAi8FseoCXgCuBCsEnKbVG07JSyJ9mtj74/AVMBvSeDBGRLMIsNp+0tWUlgdbAK1HF7YBB\nwfYgoH1U+VB33+buPwFLgTpmVgIo4O4z3d2BN6POOaAU76EFXbyqwMqgaFdwcRERyQLMLJaTQoqY\n2dyo/QHuPmCvOs8AtwP5o8qKufuqYPt3/hvlOw6YGVXv16Bse7C9d3mKUkxo7u5mNs7dK6d2IRER\nCb217l7rQAfNrA2wxt3nmdmZ+6sT5JV06RilZZbjQjOr7u4L0iMAERFJXxk4a78h0NbMziIyK76A\nmQ0BVptZCXdfFQwnrgnqrwRKRZ1fMihbGWzvXZ6iA95DM7Pdya46MCeYgTI/mLkyP41fTkREMllG\nrbbv7r3dvaS7lyUy2WOau18IjAa6BdW6AaOC7dFAVzPLaWbliEz+mB0MT24ws3rBra+Lo845oJR6\naLOBGkDb1L+GiIjIAT0CDDezy4Gfgc4A7r7IzIYD3wI7gB7uvnvx+2uBN4DcwPjgk6KUEpoFDS47\nxC8gIiKZLLNWCnH3j4CPgu11QNMD1OsH9NtP+VzgoOZvpJTQiprZLQc66O5PHUxDIiKSOeJk5asU\nE1oikI+gpyYiInIkSymhrXL3vhkWiYiIxF4aJ3SEQar30EREJGuzOPlzntLSV/u9gSciInIkOmAP\nzd3XZ2QgIiISe5FZjpkdRcZIy0ohIiKShcVLQjuU18eIiIgccdRDExEJuTS8GzMU1EMTEZFQUA9N\nRCTENClERETC4SDeNp3VachRRERCQT00EZGQy4zV9jODEpqISIjF0z00DTmKiEgoqIcmIhJycTLi\nqIQmIhJuRoJW2xcREck61EMTEQkxQ0OOIiISBnpjtYTZHTdezbTJEzi6SFEmfDI3ufypR+5nyvix\nJCQYRxc5hseee5lixY/l119+psVp1Tm+fAUAqtWsw4NPPAfAJV3a8sfq1ezcuYNadRtw/6PPkJiY\nmCnfS0Tim+6hxaGOXS/i9aEf7FN+ZY+bGffxbMZMn0XjFq147omHk4+VLns8Y6bPYsz0WcnJDOC5\nV4Yw9qNZjP9kLuvXrW
Xc6Pcy5DuISNolmMXkc6RTQotDdeqfRsGChfcpz5+/QPL2ln82p+mVE7vP\n2bFjB9u3/xs3r6kQkSOPEprs4YmH7qVhtQqMGjmMm+64O7n811+W06ZxXc5r14I5M2fscc4lndtS\np1IZ8ubLT6uzz8nokEUkBbsnhcTic6RTQpM99OxzPzMWLqFdxy4MfrU/AEWLFefT+T8wZvos+vR9\nhJu6X8LGjRuSz3lj+Ghmfv0j/27bxheffpRJkYvIgWjIUeJau45dmTB2FAA5c+akUOGjATi1ag3K\nlD2en5Yt2aN+zly5aJbUhikTxmR4rCIioIQmUX76cWny9uQJYyh/wokArFv7Bzt37gTgl+U/sfzH\npZQuU47NmzaxZvUqIHIPbfqUCRxf4cSMD1xEUhQvQ46ath+Hbry6G7NmfMKf69fRsOoJ3Hj7XXS+\n4BIef+Bufly2hARL4LhSpXjg8WcBmPPFDJ557AGyZctGQkICDzz+LAULFWbtmtVcddG5/LvtX3b5\nLuo1PJ3zu12Zyd9ORKIZ8dNzUUKLQ/97edB+y198/Z39lied3Z6ks9vvU17kmGJ8MOmzmMYmInKo\nlNBERMLMiJvHaZTQRERCLj7SWfwMrYqISMgpoR2hTq9ZkVZn1KZN47q0aVyXebNnplj/1LJFD7vN\n266/igZVyrNt2zYA1q9by+k1Kx72dfc2adxolvzwXfL+04/0ZcbH02LejqSv6y9ozLwRdzL33T4M\nevgScuaIDPjcc21rZg/rzcyhvfjwxR6UKHrUHueVKl6IP2Y8yU0XNU0umzjwRr58/25mDu3FzKG9\nKFooX4Z+lzAz4uc5NA05HsHeem88hY8ukqFtJiQmMuLtQVxw6VXp1sbk8WNo0qIVFU46GYCbe92T\nbm1J+ji26FFce94ZVO/Yj63btjPk0cs4t2VNhnw4i6cHTaXvi2MBuPa8M+h9VStu6Dc0+dxHb+3A\npBmL9rnmpXcOYv63v2TYd4gnR34qig310LKQzZs2cWHHs2jbtD6tzqjN5PEf7lNnzepVdG3bnDaN\n65J0eq3kZao+nT6FTq3OpG3T+lx3+QVs3rRpv21cetV1vPby8+zYsWOfYwOef5r2LU7jrDPq8Myj\nDySXP/fkwzSrX5XObZpy49XdGPjCMwAMHfwa7VucRusz63Ltpeex5Z9/mDd7JlMnjuWR+/vQpnFd\nfv7pR267/irGf/g+H0+bxHWXX5B83ZkzPuGKCzocVPyScbIlJpI7Z3YSExPInSsHq/74G4CNm7cm\n18mTOyfunrx/9plVWL5yHd8u+z3D45XwU0I7gl3QoRVtGtelQ9LpQGQ1jpfeGMroqV/w1nvjeeje\n3nv8sQAYPXI4jRo3Y8z0WYydPouTK1dh/bq1vPD0o7w5Yiyjp37BqVVr8Fr/Z/fb5rElS1Grbn0+\nePftPco/nT6F5T8t5f2JnzJm+ky++WoBs7/4jK8WzGXimA8YO30Wrw39gK8Xzk8+p2Xrdnww6TPG\nfjSL8ieexPC3B1GzTj2atmxNr3sfYsz0WZQpd3xy/YanN2Hh/Dn8s3kzAGM/GEGb9uceVPySMX77\n42+eeXMqi8c/wE+T+7Fh0xamzvw++fh9Pc5myfgH6NqqFg+8FOmt5c2dg1svbU6/l8ft95oD+17E\nzKG96HVlUoZ8h3iiB6sl0+095OjuPNnvXmZ/MYOEBGP177+xds1qihYrnlynSvWa3HFjd3Zs307z\nVmdT6dSqTPv8U5Yu/p7ObZoAsH37dqrXqnPAdrvfeBvdL+7Mmc3++8Py6UdT+eyjqZzdpB4Amzdv\nZvmPS9m8aRPNktqQM1cucpKLpi3OSj5n8fff8tTD97Nhw9/8s3kTjc5sluL3zZYtG6c3ac7USeNo\ndfY5TJ8ygTvu7cfsg4xf0l/B/Llpc+apnNzmXv7a+A9vP3Y5Xc+qzdBxcwC474UPue+F
D+l5WQu6\ndzmdB/uP467urXluyDQ2b/l3n+td2ucNfvvjb/Llyck7T1zB+W3q8PaY2Rn9tULKNG1fjjyjRg5l\n3bq1jJoyg+zZs3N6zYrJEzh2q1P/NIaOnsT0yRO4/YaruKz7DRxVsCANz2hywAeq91bu+BM4uXIV\nxo0e+V+hO91v6Mn53a7Yo+7rLz9/wOvcfsNV9H9jGCdXrsKIoYOZNePTVNtu0/5cBr/an4IFC3Fq\n1Rrky5cfdz+o+CX9NalbkeW/rWPtn5Gh3w+mfUm9quWSE9puw8bN4f3nruHB/uOoXbkM5zSrRr+b\n2nNU/tzs2uVs/Xc7/Yd9wm/BcOWmf7YxbPxcap9SRglNDpqGHLOQjRs2cHSRomTPnp0vPvuYlSv2\nvYG+csUvFClajK4XXUbnCy9h0dcLqVazDvNmf8HyH5cB8M/mzfssLry3a2+6nVde/F/yfqPGzRjx\nzpvJ965+X7WStX+soWadekybNI5tW7eyedMmpk0en3zO5k2bKFqsONu3b2f0iGHJ5Xnz5WPzpo37\nbbdug0Ys+nohw4a8TptzzgU4pPglfa34fT11Ti1H7lzZAWhc5yR++Gk1AOVL/zfjts2ZVVi8PFLe\n7PJnqNj6Xiq2vpfn3/qIx1+dRP9hn5CYmMDRBfMCkC1bAmedXplFy1Zl8DcKr91LX8Xic6RTDy0L\nadexC1dd2IlWZ9Tm1KrVKV/hpH3qzJzxCQNffIbs2bKRJ28+nnj+FY4uUpTHnh3ATd278e+2yHDP\nLb3voVz5Cgds68SKlTjl1Gos+nohEEloS5f8QKfWjQHImycvT774GlWq16Jpy9acdWYdihQ9hpNO\nPoX8BSIv/bz5jrvp2OoMCh9dhKo1aicnwzbtz+XOW3swaOCLPP/qnvfqEhMTadK8FSOHDuHx5wYC\nHFL8kr7mfPMz709ZwBdv38GOnbv48vtfeXVkZALSgze0o0KZY9i1y/ll1fo9ZjjuT87s2Rj9Qg+y\nZ0skMTGB6bO+57X3ZqR4jhyZzCwX8AmQk0h+GeHu95pZYWAYUBZYDnR29z+Dc3oDlwM7gRvcfWJQ\nXhN4A8gNjANu9L0nDezdfirHQ+/UajV81GT9n+dwbN60ibz58rHln3/o2q45/Z58nspVqmd2WBnu\nlBa3ZXYIEhJbF74wz91rxeJa5StV9YffHp96xTToUv24FOOyyM26vO6+ycyyA58BNwIdgPXu/oiZ\n9QIKufsdZlYJeAeoAxwLTAFOdPedZjYbuAGYRSShPevuKX4R9dDksN3Z8zqW/vAd27Zto0OXC+Iy\nmYkcyTJqSkjQg9r9TE324ONAO+DMoHwQ8BFwR1A+1N23AT+Z2VKgjpktBwq4+0wAM3sTaA8ooUn6\neqb/G5kdgogcIcwsEZgHnAC84O6zzKyYu+++Mfo7UCzYPg6IXgbp16Bse7C9d3mKlNCyqA5Jp/Pv\ntm389defbNu6lWLFSwDQf9BwSpYuE/P2nnz4PgoXLsKlV1+3T/nIdwbv8XjB0A+nkC9f/pjHILHz\nyZs9yZEjG4UL5CFXruz8tiYyy7DzzQP4ZdX6mLVzfKkizB3eh8U/ryFH9kQ+nrOEmx8ZftDXGf1C\nD86/7RWyZ0ukY4savDIi8tqiksUK8vDN53BRr9djFnPoxHa1/SJmNjdqf4C7D4iu4O47gWpmVhB4\n38wq73XczSxd7nUpoWVR7034BIARQwfzzcL53PfI05kWy5U9bt4n0UXbsWMH2bJlO+D+gbg77k5C\nQlaYX5W1nH7xEwBceHZdalYqzc2PvrvfegkJxq5dh/e3Z/HPa6jX9RGyZUtg0sAbaX3GqYz9+OuD\nukbbHi8AkQR5RafTkhPar6v/UjJLRYxf8Lk2rff23P0vM5sOJAGrzayEu68ysxLAmqDaSqBU1Gkl\ng7KVwfbe5SnSX4qQeefNV3no3t7J+2+9PoCH7+vD
8h+XkdSoJjdcdTEtGlbn+isuZOuWLQB8tWAu\n57VrQdtmDbi0azvWrll92HEMG/I6V1/cmfPPSeKSLm2Z8fE0zm/fkisu6MBZZ9QG4OXnniLp9Fok\nnV6LQa+8BMDyH5fR8rQa3Nz9UpIa1WTNak3fzkiJiQms+uQxHu/ZkdnDelO7clmWTniAo/LlBqDO\nqWUZ2z/yj5e8uXMw4P4L+XRwT7545w7OOr1ySpdmx45dzPpqOeVLFcXMePTWDsx9tw9zhvfhnGbV\ngMgakVNfu5mZQ3sx990+1KtaDiA5hgdvaMeJZY5h5tBePHBDW44vVYSZQ3sB8Nlbt1OhzDHJ7U19\n7WaqnHjcQccph87MigY9M8wsN9Ac+B4YDXQLqnUDRgXbo4GuZpbTzMoBFYDZwfDkBjOrF0w0uTjq\nnANSDy1k2pxzLm2b1Of2ux8gW7ZsjBg6OHn6+5IfvuPhp1+ieq063NrjCt5581XOv+RK+t55GwMG\nv0vho4swasRQnnq0Lw89+UKa2xz4wtOMHDoYgEKFizB4ZGSpo2+/+ZIx02ZyVMFCzPh4Gl8vnM/E\nz+ZzbMlSLJw3m9Ejh/HBxE/ZsXMHHVqeTr0GjciZKzfLlvzA488PpEq1mrH/DySpKpg/D5/NX8pt\nT4xMsV6fq1ox+fPvuOreIRTMn5tPBt/G1Jnfs+3ffdcBBciTKwdn1D6Ru/43io7Nq3NSuWLU6fIw\nRQvl47Mht/PZvKWc17o24z75miffmEJCgpE7Z/Y9rnHXs6M4vlRR6nV9BIj02HYbOXEeHVvU4JGB\nEzjumIIUOioPXy1eSb8b2x1UnGGUgSuFlAAGBffREoDh7j7GzL4AhpvZ5cDPQGcAd19kZsOBb4Ed\nQI9gyBLgWv6btj+eVCaEQCYkNDNrD7wPnOzu35tZWaCBu78dHK8GHOvu+1/wLfXrLwdqufva2ESc\nteTPX4Da9Rvy8dSJlCpTjsSERE44sSLLf1xGqdJlk5eMat+pK0MHv0bdBo1Y8sN3XNypNQA7d+2i\neIljD6rNAw05nnZmU44qWCh5v3qtuhxbMjK6MHfWFyS1aUeu3JF/+TdvdTZzZs7gtDObUbrs8Upm\nmWjbv9sZNe3LVOs1rX8yLRqewq2XNgcgV45slCpemKW/rNmj3u4e1a5dzujpXzJt1vc8dce5DJ8w\nj127nNXrNvL5wmXUOKU0cxf9wvN3dSVnjux8+NFXfL041VGmZCMnz2fEM915ZOAEOrWswXuTFxxU\nnGGWgbMcvwL2mebs7uuApvueAe7eD+i3n/K5wEF1pzOjh3YekWcTzgPuJfKg3fnA7idsqwG1iDx3\nIIeg8wWX8lr/ZylZqgwdz7souXzvf6WZGY5TsVJlhn04JeZx5MmTd4/93HnypPG8tNWT9LFl2/Y9\n9nfs3EVCQuR3J2eO/3pMZtD5lgH89GvK/3bcfQ8tLT6es5iWV/yPpEaVeeWBi3j6jSkMHT839ROB\nX1b9yeYt26h4fHE6tajBlfcOOag4JevL0HtoZpYPOI3IU+Fdg+JHgEZmttDM7gD6Al2C/S5mVsfM\nvjCzBWb2uZmdFFwr0cyeMLNvzOwrM7t+r7Zym9l4M7syA7/iEaFW3fr8svxHxn34Hm3ad0ouX/HL\ncr5aEPnjMPq94dSs24ATTjyZ1at+48v5kTX4/v33XxZ//226x1i7XgMmjRvN1i1b2LxpE1MmjKF2\nvYbp3q4cvJ9/W0/1k0sDJN/rApjy+Xdc2/WM5P2qJ5Xc59wDmTF/Kee2rImZcUzh/NSvejzzF/1C\n6RKF+H3dBl57bwaDR82kasVSe5y3afM28ufJecDrjpg4n9subUGOHNn4/sffDzvOsNBq++mjHTDB\n3Reb2bpgaZNeQE93bwNgZquJDBleF+wXABq5+w4zawY8BHQEriLSu6sWHCsc1U4+YCjwpru/mVFf\n7kiSdPY5/Lhk
MfkL/Pe24BNOrMir/Z/ju2++4qSTT+G8iy4jZ86cPP/aW/Tt05NNGzeya9dOLr/m\nBk6sWCnNbUXfQwMYMCTley8AVWvUps05nWnfshEA519yJSdVqpy8XqMcOR7sP44X7zmPvzdu4bP5\nS5PL+708nsdv68ic4X1ISDCWrfiDzjcPSOFK/3lvykLqVCnHnOG9cYc7nnqPP/7cxMXt6nHDhU3Y\nvmMnm/7ZxuV37bkg9Zr1G1nw3QrmDO/DhM++4fX3P9/rugt49NYO9A1eWXO4cYZBZJZjFshGMZCh\nS1+Z2Rjgf+4+2cxuAEoDY9gzoV3CngmtFPAskdkvDmR394pmNhLo7+6T92pjOfA38Ji7v3WAOK4i\nkhA5tmSpmp/O/yHm3zWzXdKlLdfceBt1G0QSxvIfl3Hd5eczZvqsTI4svLT0lcRKLJe+qnBKVX9q\n6KRYXIq2VYrHLK70kGFDjkEPqgnwSpB0biMy0yW1fzo8AEx398rA2UCuNDQ3A0iyA0ztcfcB7l7L\n3WtFPxAcBn+uX0eTuqdyVMFCyclMROKbhhxjrxMw2N2v3l1gZh8Du4DoZSU27rV/FP89UHdJVPlk\n4Gozm757yNHddy9xcE/weYHI1M+4Uajw0Uybte9Dq2WPL6/emUhcMixOhhwzclLIeUSm60cbSWRy\nyE4z+9LMbgamA5V2TwoBHgMeNrMF7JmAXwF+Ab4ysy+JzJSMdiOQ28weS4fvIiIiR5gM66G5e+P9\nlD17gOq199o/MWr7ruDcHcAtwSf6mmWjdi896EBFREImKwwXxoKWvhIRkVDQ0lciIiEWT9P2ldBE\nRMIsi8xQjAUNOYqISCiohyYiEnLx0kNTQhMRCTk9hyYiIpKFqIcmIhJiBiTERwdNCU1EJOw05Cgi\nIpKFqIcmIhJymuUoIiKhoCFHERGRLEQ9NBGREIunWY7qoYmISCiohyYiEmrx88ZqJTQRkTDTavsi\nIiJZi3poIiIhFycdNCU0EZEwi8xyjI+UpiFHEREJBfXQRERCLj76Z0poIiLhFycZTUOOIiISCuqh\niYiEnB6sFhGRUIiTSY4achQRkXBQD01EJOTipIOmhCYiEnpxktE05CgiIqGgHpqISIgZ8TPLUT00\nEREJBfXQRETCTO9DExGRsLAYfVJtx6yUmU03s2/NbJGZ3RiUFzazyWa2JPhZKOqc3ma21Mx+MLOW\nUeU1zezr4NizZqmnZSU0ERGJlR3Are5eCagH9DCzSkAvYKq7VwCmBvsEx7oCpwBJwItmlhhc6yXg\nSqBC8ElKrXElNBGRsMugLpq7r3L3+cH2RuA74DigHTAoqDYIaB9stwOGuvs2d/8JWArUMbMSQAF3\nn+nuDrwZdc4B6R6aiEioWSxnORYxs7lR+wPcfcB+WzUrC1QHZgHF3H1VcOh3oFiwfRwwM+q0X4Oy\n7cH23uUpUkITEZG0WuvutVKrZGb5gJHATe6+Ifr2l7u7mXl6BKeEJiISchk5y9HMshNJZm+5+3tB\n8WozK+Huq4LhxDVB+UqgVNTpJYOylcH23uUp0j00EZEQi9XtszTOcjTgVeA7d38q6tBooFuw3Q0Y\nFVXe1cxymlk5IpM/ZgfDkxvMrF5wzYujzjkg9dBERCRWGgIXAV+b2cKgrA/wCDDczC4HfgY6A7j7\nIjMbDnxLZIZkD3ffGZx3LfAGkBsYH3xSpIQmIhJ2GTTk6O6fpdBa0wOc0w/ot5/yuUDlg2lfCU1E\nJOS0lqOIiEgWoh6aiEjIxctajkpoIiIhFyf5TEOOIiISDuqhiYiEWVofIgsB9dBERCQU1EMTEQm5\neJm2r4QmIhJiRvzMctSQo4iIhIJ6aCIiIRcnHTQlNBGR0IuTjKYhRxERCQX10EREQk6zHEVEJBQ0\ny1FERCQLUQ9NRCTk4qSDpoQmIhJ6cZLRNOQoIiKhoB6aiEiIRRbbj48umnpoIi
ISCuqhiYiEmcXP\ntH0lNBGRkIuTfKYhRxERCQf10EREwi5OumhKaCIioWZxM8sx7hPaN18uWFv+mDw/Z3YcWUARYG1m\nByGhoN+l1JXJ7ACyorhPaO5eNLNjyArMbK6718rsOCTr0+9SxtMsRxERyfKMuLmFplmOIiISDuqh\nSVoNyOwAJDT0u5TR4qSLpoQmaeLu+iMkMaHfpYwXL7McNeT4//buO1iq8ozj+PcHglJEjAWNmmDv\nihAFWwYNYomocdSAlcigYNSoiV0THTVqHKNxsESNgmNiycQaR1HJRIyCgESsNMUoBgXsvcAvf7zv\ndQ4byr2wsPeefT7Mzt179uw572V29nnr84YQQiiFaKGFEELJxSzHEEIIpVAn8Sy6HMOyk7SlpD0l\ntal1WULLJNVLGyIsT9FCC9XQH9gAmCfpadtf17pAoWWxbQBJvYDXbb9d4yKVRx1tHxMttFANFwKv\nAz8FdouWWmgsSTtIapufbwxcAnxT21KFlioCWlgqxS4i2/NJX0SziKAWmuYC4MEc1GYAHwJfAUhq\nJfnizk0AAApvSURBVKl1DctWIqrSo3mLgBaaTJIKXUR9JfUGOgMXA2+QgtouEdTCokhqBWD7QOB9\n4G6gI6ml3z6/Nh9oW6MiloZIXY7VeDR3EdBCkxWC2WnA+cAA4Bqgt+3fkmraxwE9a1bI0GzlCtH8\n/Hwt2/1J37tjgD7AVZJul3QXcKmkdjUsbmgCSbdImi3pxcKx70h6TNK0/HP1wmtnS5ouaYqkvQvH\ne0h6Ib92TWMnDUVAC0tFUh9gD9u7A+8BWwEDJO1h+3JgAjC9lmUMzVOhQnQycKWkDrml9iTQDvgd\ncDVwE3Cl7c9rVtiSWIEdjsOBfSqOnQWMsr0pMCr/jqStSBPKts7vua7QxXw9MBjYND8qr7lQEdBC\noyykhvQmcJKkgcCOwH5AJ+ACSX1tXxUz1cKiSPoJcAxwpu1PAWwPAZ4DLgKes/247TdrWMzSWFFd\njrZHkyq4RQcCI/LzEcBBheN32v7S9gxSBXgnSesCnWyPzZWf2wrvWawIaGGJKsbMeuYugxm2XyfV\nnq63PQt4HphE+lIKYXE2Ah6wPUtSm4bxVtuHAu8A361p6cKirClpQuFxXCPe0yV/PwC8DXTJz9cj\nVYwbzMzH1svPK48vUaxDC0tUCGZDgNOBl4BHJd0JvAiMkNQdOBjY3/bsmhU2NDvFClHBW8DukjrZ\n/iifdxgw0/agFV7IkqticuK5y7I5q21LqvwsVE0EtLBIFS2ztYHtgJ2AHwB7AYOAYaSp1j2Bg22/\nWqPihmao4jN0MPAx8AnwKHAEcKykKaRZsucC/WpV1lKr7QzFdyStm1vj6wINFd63SAkZGqyfj72V\nn1ceX6LocgwLVfFFdCJwMrC17XdtjyQN7q4NnAGMtf0b26/UrsShOaqYAHIG6QvsVqAbaSbsWsBR\nwKHAgDyWEsrlAdJ4Kfnn/YXj/SWtLGlD0vDFuNw9+ZGkXnns/ujCexYrAlpYqIpa9THAOGD9PJUa\n2w8Do4E21Lr+F5o1STuQJgD0JtW2ZwM3Az1tn2v7cOBo2y/UrpTltqJmOUq6g7T8YnNJMyUNAi4D\n9pI0jbQs4zIA2y+R1h++DDwC/Nz2vHypE0ifkenAq8DDjfo7/79rO9SzipZZD+Ac4BHbN+Xa0kRg\nsu0B+ZwODbPUQgCQ1BlYw/arkrYjZf54D9gDONH27pLOJmUJGWT79tqVtvy6de/hR58YW5VrdenU\n9tllGUNb3mIMLXyrIpgdAmxJyuLQW9I425Py5I/XJA23PTCCWSiStBKwGbB/Hi9ZEzjC9md5duxf\n8qnvAb8HqvNNGwIR0EJBIZjtQ2ry700KakcCB0ian7uFNsx93iF8K1eIvsmTPM4BdgbOsP1ZPmUl\nYG9Jm5Mmf/SOdWYrRhVnOTZrMYYWFpDzMg
4Fxtv+2vbzpAHZDsDhkrYGiMH7UJRbXw3ZHDYj5WS8\nFuguqR+A7WHAPaS1igdEMFuB6iM3cbTQ6t1C1gjNIGXN30jS9rYn2X4qL3zdk7ToNYRKbYBdJf0a\nwPbOktYEDgf6SfoAaE0aT7ujIZdjCNUUAa2OVYyZ9SPtQ/UBcBLwB+DQhm5G2/+U9Ezk1QtFktax\n/bbt2ZLeIeX0vAfA9lxJD5I+V2cC2wM/imC24rWAxlVVRJdjQNIJpE06dwNuAU7Nj87AwJxElAhm\noUjSFsB/JV0t6XDgBtL0/DmSrssVphnAY8CxQC/bU2tY5FByEdDqkKTv5en2zhlADiPNRDsX2AUY\nQlroegmpmyhSWYWF+QR4mtRFPYiUIX01YCTwETBM0lGkytFHthuV7SFUX+yHFkpJUhfgl8BQSR1z\n3sW55F2Cbb8PnAJsm1fsn257bs0KHJot2zNJC+67k2bEjiJl/bgIeBBYAxgIDLP9RY2KGVDV/jV3\nEdDqzxxgPCmb+c/yYunpwJ15DRHA90lZQVqTxj9CWEBhO6GzAJPWm80CegAvkMZhZwLH2H65JoUM\ndScmhdQJSZsCrWxPkfRnUkLhfYHBts+SdD0wWtLzpETDRxTS0ISwgNxd3RDUpgFXkoLZqbbvy+Nr\n7+QWf6gh0TK6C6shAlodkLQGMAWYK+lCYB5wI2m8YxNJx9seKqknsApweawzC0uSZ8h+Jel24Ang\nWtv35dcm17RwoS5FQKsDtt+V1Ad4nNTNvD1wF2lQ/ytg21zbvtX2l7UraWiJcqv/LKCrpPaFzCAh\nrFAR0OqE7X9I2hu4hhTQupAWSvcn7XG2OXAHEAEtLI2xpA1eQzMUXY6hdGw/JulXpF2me9keIekB\nUpaH9rY/rG0JQ0tle7Kk/tE6a55awgzFaoiAVmdsPyRpPjBW0s623611mUI5RDALtRYBrQ7ZflhS\nW+BxST0iFVEIJdZCFkVXQwS0OmX7fkmjIpiFUG4tJFF+VcTC6jpm+5NalyGEEKolWmghhFB2ddJE\nixZaCCGEUogWWgghlFy9TNuPFlposSTNk/ScpBcl/VVS+2W4Vm9Jf8/PD8iZLxZ1bue8h1xT73FB\nXgfYqOMV5wyXdEgT7tVV0otNLWMop9g+JoTm73Pb3WxvQ0rhNaT4opImf8ZtP2D7ssWc0hlockAL\nISxfEdBCWTxJSrTcVdIUSbeRMqJsIKmvpDGSJuaWXEcASftImixpIoW0TZIGShqWn3eRdK+kSfmx\nC3AZsHFuHV6Rzztd0nhJz+cE0A3XOlfSVEn/IqUXWyxJg/N1Jkn6W0Wrs4+kCfl6++fzW0u6onDv\n45f1PzKUj6r0aO4ioIUWL+/jti9pHy6ATYHrbG8NfAqcB/Sx3R2YAJwmaRXgJqAfaduTdRZx+WuA\nJ2xvT9rI8iXSHmCv5tbh6ZL65nvuBHQDekj6oaQepFyZ3YD9gB0b8efcY3vHfL9XSDtBN+ia7/Fj\n4Ib8NwwCPrS9Y77+YEkbNuI+oZ7USUSLSSGhJWsn6bn8/EngT6SNS/9je2w+3gvYCngqb9/VFhgD\nbAHMsD0NIG+BctxC7rEncDRA3h/uQ0mrV5zTNz/+nX/vSApwqwL3NqSEynkzl2QbSReTujU7AiML\nr92dF8JPk/Ra/hv6AtsVxtdWy/ee2oh7hVAqEdBCS/a57W7FAzlofVo8BDxme0DFeQu8bxkJuNT2\nHyvuccpSXGs4cJDtSZIGAr0Lr7niXOd7n2S7GPiQ1HUp7h1KKmY5hlAOY4FdJW0CIKmDpM2AyaT9\nuzbO5w1YxPtHAUPze1tLWg34mNT6ajASOLYwNreepLWB0cBBktpJWpXUvbkkqwKzJLUBjqh47VBJ\nrXKZNyJt2joSGJrPR9Jmkjo04j6hTjTsWF0PsxyjhRZKzfac3NK5Q9LK+fB5tqdKOg54SNJnpC7L\nVRdyiV
8AN0oaRNrpe6jtMZKeytPiH87jaFsCY3IL8RPgSNsTJd0FTAJmA+MbUeTzgWeAOflnsUxv\nAOOATsAQ219Iupk0tjYxb9I6Bziocf87oR5MnPjsyHZttGaVLje3StdZLpR2UQ8hhBBatuhyDCGE\nUAoR0EIIIZRCBLQQQgilEAEthBBCKURACyGEUAoR0EIIIZRCBLQQQgilEAEthBBCKURACyGEUAr/\nA0eSJwOdJLH+AAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<matplotlib.figure.Figure at 0x7fb0eb444e48>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "# Visualize Actual vs. Prediction labels from `df_` using the `plot` helper\n",
    "# defined earlier in the notebook (both `plot` and `df_` come from prior cells).\n",
    "plot(actual_value = df_.loc[:,'Actual'].values.astype(int),\n",
    "     pred_value = df_.loc[:,'Prediction'].values.astype(int))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 33,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-24T00:01:49.850833Z",
     "start_time": "2017-07-24T00:01:49.828658Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "no_of_features  hidden_layers\n",
       "1.0             1.0              (0.578452539765, 0.841046710607)\n",
       "                3.0                                    (nan, nan)\n",
       "12.0            1.0              (0.668723301504, 0.932285711705)\n",
       "                3.0              (0.636233676898, 0.977571551948)\n",
       "24.0            1.0                (0.7637835282, 0.920711831434)\n",
       "                3.0               (0.783714231971, 0.84733658272)\n",
       "48.0            1.0              (0.817941844822, 0.875285309714)\n",
       "                3.0                (0.60901468163, 1.02861547867)\n",
       "122.0           1.0              (0.665214663577, 0.974471096522)\n",
       "                3.0              (0.855675801742, 0.874380756313)\n",
       "dtype: object"
      ]
     },
     "execution_count": 33,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from scipy import stats\n",
    "\n",
    "def fn(x):\n",
    "    \"\"\"Return a 95% normal-approximation interval (lo, hi) for the group's f1_score.\n",
    "\n",
    "    NOTE(review): `scale` is the sample std of f1_score, not the standard\n",
    "    error of the mean (std / sqrt(n)) — confirm this is the intended spread\n",
    "    interval rather than a CI for the mean. Groups with a single row yield\n",
    "    (nan, nan) because pandas' std() is NaN for n == 1 (see output above).\n",
    "    \"\"\"\n",
    "    #print(x)\n",
    "    return stats.norm.interval(0.95, loc=x.f1_score.mean(), scale=x.f1_score.std())\n",
    "psg.apply(fn)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "_draft": {
   "nbviewer_url": "https://gist.github.com/7d1ace18a82178e15ece8fc5252fce88"
  },
  "anaconda-cloud": {},
  "gist": {
   "data": {
    "description": "Hyper parameter tuning",
    "public": false
   },
   "id": "7d1ace18a82178e15ece8fc5252fce88"
  },
  "kernelspec": {
   "display_name": "Python [conda env:p3]",
   "language": "python",
   "name": "conda-env-p3-py"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.1"
  },
  "varInspector": {
   "cols": {
    "lenName": 16,
    "lenType": 16,
    "lenVar": 40
   },
   "kernels_config": {
    "python": {
     "delete_cmd_postfix": "",
     "delete_cmd_prefix": "del ",
     "library": "var_list.py",
     "varRefreshCmd": "print(var_dic_list())"
    },
    "r": {
     "delete_cmd_postfix": ") ",
     "delete_cmd_prefix": "rm(",
     "library": "var_list.r",
     "varRefreshCmd": "cat(var_dic_list()) "
    }
   },
   "types_to_exclude": [
    "module",
    "function",
    "builtin_function_or_method",
    "instance",
    "_Feature"
   ],
   "window_display": false
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
