{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Read Data Sample"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T22:46:36.459086Z",
     "start_time": "2017-07-23T22:46:36.063075Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "import pandas as pd\n",
    "import numpy as np\n",
    "import os\n",
    "import time\n",
    "from collections import namedtuple\n",
    "pd.set_option(\"display.max_rows\",35)\n",
    "%matplotlib inline"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T22:46:36.469934Z",
     "start_time": "2017-07-23T22:46:36.460477Z"
    }
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "rm: cannot remove 'dataset/scores/tf_dense_only_nsl_kdd_scores_all.pkl': No such file or directory\n"
     ]
    }
   ],
   "source": [
    "%%bash\n",
    "rm dataset/scores/tf_dense_only_nsl_kdd_scores_all.pkl"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T22:46:36.569772Z",
     "start_time": "2017-07-23T22:46:36.471730Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "class dataset:\n",
    "    kdd_train_2labels = pd.read_pickle(\"dataset/kdd_train_2labels.pkl\")\n",
    "    kdd_test_2labels = pd.read_pickle(\"dataset/kdd_test_2labels.pkl\")\n",
    "    kdd_test__2labels = pd.read_pickle(\"dataset/kdd_test__2labels.pkl\")\n",
    "    \n",
    "    kdd_train_5labels = pd.read_pickle(\"dataset/kdd_train_5labels.pkl\")\n",
    "    kdd_test_5labels = pd.read_pickle(\"dataset/kdd_test_5labels.pkl\")\n",
    "    "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T22:46:36.576856Z",
     "start_time": "2017-07-23T22:46:36.571336Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(125973, 124)"
      ]
     },
     "execution_count": 4,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "dataset.kdd_train_2labels.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T22:46:36.582374Z",
     "start_time": "2017-07-23T22:46:36.578330Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(22544, 124)"
      ]
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "dataset.kdd_test_2labels.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T22:46:37.389027Z",
     "start_time": "2017-07-23T22:46:36.583840Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "0.99589320646770185"
      ]
     },
     "execution_count": 6,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from sklearn import model_selection as ms\n",
    "from sklearn import preprocessing as pp\n",
    "\n",
    "class preprocess:\n",
    "    \n",
    "    output_columns_2labels = ['is_Normal','is_Attack']\n",
    "    \n",
    "    x_input = dataset.kdd_train_2labels.drop(output_columns_2labels, axis = 1)\n",
    "    y_output = dataset.kdd_train_2labels.loc[:,output_columns_2labels]\n",
    "\n",
    "    x_test_input = dataset.kdd_test_2labels.drop(output_columns_2labels, axis = 1)\n",
    "    y_test = dataset.kdd_test_2labels.loc[:,output_columns_2labels]\n",
    "    \n",
    "    x_test__input = dataset.kdd_test__2labels.drop(output_columns_2labels, axis = 1)\n",
    "    y_test_ = dataset.kdd_test__2labels.loc[:,output_columns_2labels]\n",
    "\n",
    "    ss = pp.StandardScaler()\n",
    "\n",
    "    x_train = ss.fit_transform(x_input)\n",
    "    x_test = ss.transform(x_test_input)\n",
    "    x_test_ = ss.transform(x_test__input)\n",
    "\n",
    "    y_train = y_output.values\n",
    "    y_test = y_test.values\n",
    "    y_test_ = y_test_.values\n",
    "\n",
    "    \n",
    "preprocess.x_train.std()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T22:46:39.155545Z",
     "start_time": "2017-07-23T22:46:37.390539Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "import tensorflow as tf\n",
    "from tensorflow.contrib.legacy_seq2seq.python.ops.seq2seq import basic_rnn_seq2seq\n",
    "from tensorflow.contrib.rnn import RNNCell, LSTMCell, MultiRNNCell\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T22:46:39.409271Z",
     "start_time": "2017-07-23T22:46:39.157099Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "class network(object):\n",
    "    \n",
    "    input_dim = 122\n",
    "    classes = 2\n",
    "    hidden_encoder_dim = 122\n",
    "    hidden_layers = 1\n",
    "    latent_dim = 10\n",
    "\n",
    "    hidden_decoder_dim = 122\n",
    "    lam = 0.01\n",
    "    \n",
    "    def __init__(self, classes, hidden_layers, num_of_features):\n",
    "        self.classes = classes\n",
    "        self.hidden_layers = hidden_layers\n",
    "        self.latent_dim = num_of_features\n",
    "            \n",
    "    def build_layers(self):\n",
    "        tf.reset_default_graph()\n",
    "        #learning_rate = tf.Variable(initial_value=0.001)\n",
    "\n",
    "        input_dim = self.input_dim\n",
    "        classes = self.classes\n",
    "        hidden_encoder_dim = self.hidden_encoder_dim\n",
    "        hidden_layers = self.hidden_layers\n",
    "        latent_dim = self.latent_dim\n",
    "        hidden_decoder_dim = self.hidden_decoder_dim\n",
    "        lam = self.lam\n",
    "        \n",
    "        with tf.variable_scope(\"Input\"):\n",
    "            self.x_input = tf.placeholder(\"float\", shape=[None, 1, input_dim])\n",
    "            self.y_input_ = tf.placeholder(\"float\", shape=[None, 1, classes])\n",
    "            self.keep_prob = tf.placeholder(\"float\")\n",
    "            self.lr = tf.placeholder(\"float\")\n",
    "            self.x_list = tf.unstack(self.x_input, axis= 1)\n",
    "            self.y_list_ = tf.unstack(self.y_input_, axis = 1)\n",
    "            self.y_ = self.y_list_[0]\n",
    "            \n",
    "            #GO = tf.fill((tf.shape(self.x)[0], 1), 0.5)\n",
    "            \n",
    "            #y_with_GO = tf.stack([self.y_, GO])\n",
    "            \n",
    "        with tf.variable_scope(\"lstm\"):\n",
    "            multi_cell = MultiRNNCell([LSTMCell(input_dim) for i in range(hidden_layers)] )\n",
    "            \n",
    "            self.y, states = basic_rnn_seq2seq(self.x_list, self.y_list_, multi_cell)\n",
    "            #self.y = tf.slice(self.y, [0, 0], [-1,2])\n",
    "            \n",
    "            #self.out = tf.squeeze(self.y)\n",
    "            \n",
    "            #self.y = tf.layers.dense(self.y[0], classes, activation = None)\n",
    "            \n",
    "            self.y = tf.slice(self.y[0], [0, 0], [-1,2])\n",
    "            \n",
    "        with tf.variable_scope(\"Loss\"):\n",
    "            \n",
    "            self.regularized_loss = tf.losses.mean_squared_error(self.y_, self.y)\n",
    "            correct_prediction = tf.equal(tf.argmax(self.y_, 1), tf.argmax(self.y, 1))\n",
    "            self.tf_accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32), name = \"Accuracy\")\n",
    "\n",
    "        with tf.variable_scope(\"Optimizer\"):\n",
    "            learning_rate=self.lr\n",
    "            optimizer = tf.train.AdamOptimizer(learning_rate)\n",
    "            gradients, variables = zip(*optimizer.compute_gradients(self.regularized_loss))\n",
    "            gradients = [\n",
    "                None if gradient is None else tf.clip_by_value(gradient, -1, 1)\n",
    "                for gradient in gradients]\n",
    "            self.train_op = optimizer.apply_gradients(zip(gradients, variables))\n",
    "            #self.train_op = optimizer.minimize(self.regularized_loss)\n",
    "            \n",
    "        # add op for merging summary\n",
    "        #self.summary_op = tf.summary.merge_all()\n",
    "        self.pred = tf.argmax(self.y, axis = 1)\n",
    "        self.actual = tf.argmax(self.y_, axis = 1)\n",
    "\n",
    "        # add Saver ops\n",
    "        self.saver = tf.train.Saver()\n",
    "        "
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-06-01T00:59:00.684124Z",
     "start_time": "2017-06-01T00:58:59.843181Z"
    }
   },
   "source": [
    "batch_iterations = 200\n",
    "\n",
    "x_train, x_valid, y_train, y_valid, = ms.train_test_split(preprocess.x_train, \n",
    "                                                                          preprocess.y_train, \n",
    "                                                                          test_size=0.1)\n",
    "batch_indices = np.array_split(np.arange(x_train.shape[0]), \n",
    "                                           batch_iterations)\n",
    "                                                                          \n",
    "for i in batch_indices:\n",
    "    print(x_train[i,np.newaxis,:])\n",
    "    print(y_train[i,np.newaxis,:])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T22:46:39.741752Z",
     "start_time": "2017-07-23T22:46:39.410915Z"
    },
    "scrolled": false
   },
   "outputs": [],
   "source": [
    "import collections\n",
    "import sklearn.metrics as me\n",
    "\n",
    "class Train:    \n",
    "    \n",
    "    result = namedtuple(\"score\", ['epoch', 'no_of_features','hidden_layers','train_score', 'test_score', 'f1_score', 'test_score_20', 'f1_score_20', 'time_taken'])\n",
    "\n",
    "    predictions = {}\n",
    "    predictions_ = {}\n",
    "\n",
    "    results = []\n",
    "    best_acc = 0\n",
    "    best_acc_global = 0\n",
    "\n",
    "    def train(epochs, net, h,f, lrs):\n",
    "        batch_iterations = 200\n",
    "        train_loss = None\n",
    "        Train.best_acc = 0\n",
    "        os.makedirs(\"dataset/tf_lstm_nsl_kdd-orig/hidden layers_{}_features count_{}\".format(h,f),\n",
    "                    exist_ok = True)\n",
    "        with tf.Session() as sess:\n",
    "            #summary_writer_train = tf.summary.FileWriter('./logs/kdd/VAE/training', graph=sess.graph)\n",
    "            #summary_writer_valid = tf.summary.FileWriter('./logs/kdd/VAE/validation')\n",
    "\n",
    "            sess.run(tf.global_variables_initializer())\n",
    "            start_time = time.perf_counter()\n",
    "            \n",
    "            accuracy, pred_value, actual_value, y_pred = sess.run([net.tf_accuracy, \n",
    "                                                                   net.pred, \n",
    "                                                                   net.actual, net.y], \n",
    "                                                                  feed_dict={net.x_input: preprocess.x_test[:,np.newaxis,:], \n",
    "                                                                             net.y_input_: preprocess.y_test[:,np.newaxis,:], \n",
    "                                                                             net.keep_prob:1})\n",
    "            \n",
    "            print(\"Initial Accuracy, before training: {}\".format(accuracy))\n",
    "            \n",
    "            for c, lr in enumerate(lrs):\n",
    "                for epoch in range(1, (epochs+1)):\n",
    "                    x_train, x_valid, y_train, y_valid, = ms.train_test_split(preprocess.x_train, \n",
    "                                                                              preprocess.y_train, \n",
    "                                                                              test_size=0.1)\n",
    "                    batch_indices = np.array_split(np.arange(x_train.shape[0]), \n",
    "                                               batch_iterations)\n",
    "\n",
    "                    for i in batch_indices:\n",
    "\n",
    "                        _, train_loss = sess.run([net.train_op, net.regularized_loss], #net.summary_op\n",
    "                                                              feed_dict={net.x_input: x_train[i,np.newaxis,:], \n",
    "                                                                         net.y_input_: y_train[i,np.newaxis,:], \n",
    "                                                                         net.keep_prob:1, net.lr:lr})\n",
    "                        #summary_writer_train.add_summary(summary_str, epoch)\n",
    "                        if(train_loss > 1e9):\n",
    "                            print(\"Step {} | Training Loss: {:.6f}\".format(epoch, train_loss))\n",
    "\n",
    "\n",
    "                    valid_accuracy,valid_loss = sess.run([net.tf_accuracy, net.regularized_loss], #net.summary_op \n",
    "                                                          feed_dict={net.x_input: x_valid[:,np.newaxis,:], \n",
    "                                                                     net.y_input_: y_valid[:,np.newaxis,:], \n",
    "                                                                     net.keep_prob:1, net.lr:lr})\n",
    "                    #summary_writer_valid.add_summary(summary_str, epoch)\n",
    "\n",
    "\n",
    "\n",
    "                    accuracy, pred_value, actual_value, y_pred = sess.run([net.tf_accuracy, \n",
    "                                                                   net.pred, \n",
    "                                                                   net.actual, net.y], \n",
    "                                                                  feed_dict={net.x_input: preprocess.x_test[:,np.newaxis,:], \n",
    "                                                                             net.y_input_: preprocess.y_test[:,np.newaxis,:], \n",
    "                                                                             net.keep_prob:1, net.lr:lr})\n",
    "                    f1_score = me.f1_score(actual_value, pred_value)\n",
    "                    accuracy_, pred_value_, actual_value_, y_pred_ = sess.run([net.tf_accuracy, \n",
    "                                                                   net.pred, \n",
    "                                                                   net.actual, net.y], \n",
    "                                                                  feed_dict={net.x_input: preprocess.x_test_[:,np.newaxis,:], \n",
    "                                                                             net.y_input_: preprocess.y_test_[:,np.newaxis,:], \n",
    "                                                                             net.keep_prob:1, net.lr:lr})\n",
    "                    f1_score_ = me.f1_score(actual_value_, pred_value_)\n",
    "                    print(\"Step {} | Training Loss: {:.6f} | Train Accuracy: {:.6f} | Test Accuracy: {:.6f}, {:.6f}\".format(epoch, train_loss, valid_accuracy, accuracy, accuracy_))\n",
    "\n",
    "                    if accuracy > Train.best_acc_global:\n",
    "                                Train.best_acc_global = accuracy\n",
    "                                Train.pred_value = pred_value\n",
    "                                Train.actual_value = actual_value\n",
    "                                Train.pred_value_ = pred_value_\n",
    "                                Train.actual_value_ = actual_value_\n",
    "                                Train.best_parameters = \"Hidden Layers:{}, Features Count:{}\".format(h, f)\n",
    "\n",
    "                    if accuracy > Train.best_acc:\n",
    "\n",
    "                        #net.saver.save(sess, \"dataset/tf_vae_only_nsl_kdd_hidden layers_{}_features count_{}\".format(epochs,h,f))\n",
    "                        #Train.results.append(Train.result(epochs, f, h,valid_accuracy, accuracy))\n",
    "                        #curr_pred = pd.DataFrame({\"Attack_prob\":y_pred[:,-2], \"Normal_prob\":y_pred[:, -1]})\n",
    "                        #Train.predictions.update({\"{}_{}_{}\".format(epochs,f,h):curr_pred})\n",
    "\n",
    "                        Train.best_acc = accuracy\n",
    "                        if not (np.isnan(train_loss)):\n",
    "                            net.saver.save(sess, \n",
    "                                       \"dataset/tf_lstm_nsl_kdd-orig/hidden layers_{}_features count_{}/model\"\n",
    "                                       .format(h,f), \n",
    "                                       global_step = epoch, \n",
    "                                       write_meta_graph=False)\n",
    "\n",
    "                        curr_pred = pd.DataFrame({\"Attack_prob\":y_pred[:,-2], \"Normal_prob\":y_pred[:, -1], \"Prediction\":pred_value, \"Actual\":actual_value})\n",
    "                        curr_pred_ = pd.DataFrame({\"Attack_prob\":y_pred_[:,-2], \"Normal_prob\":y_pred_[:, -1], \"Prediction\":pred_value_, \"Actual\": actual_value_})\n",
    "                        Train.predictions.update({\"{}_{}_{}\".format((epochs+1)* (c+1),f,h):\n",
    "                                                  (curr_pred,\n",
    "                                                   Train.result((epochs+1)*(c+1), f, h,valid_accuracy, accuracy, f1_score, accuracy_, f1_score_, time.perf_counter() - start_time))})\n",
    "                        Train.predictions_.update({\"{}_{}_{}\".format((epochs+1)* (c+1),f,h):\n",
    "                                                  (curr_pred_,\n",
    "                                                   Train.result((epochs+1)*(c+1), f, h,valid_accuracy, accuracy, f1_score, accuracy_, f1_score_, time.perf_counter() - start_time))})\n",
    "\n",
    "\n",
    "\n",
    "            "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T22:46:39.820664Z",
     "start_time": "2017-07-23T22:46:39.743262Z"
    },
    "collapsed": true,
    "scrolled": false
   },
   "outputs": [],
   "source": [
    "import itertools\n",
    "\n",
    "df_results = []\n",
    "past_scores = []\n",
    "\n",
    "class Hyperparameters:\n",
    "#    features_arr = [2, 4, 8, 16, 32, 64, 128, 256]\n",
    "#    hidden_layers_arr = [2, 4, 6, 10]\n",
    "\n",
    "    def start_training():\n",
    "\n",
    "        global df_results\n",
    "        global past_scores\n",
    "        \n",
     "        Train.predictions = {}\n",
     "        Train.predictions_ = {}\n",
    "        Train.results = []\n",
    "        \n",
    "        features_arr = [1] #[4, 8, 16, 32]\n",
    "        hidden_layers_arr = [1, 3]\n",
    "\n",
    "        epochs = [10]\n",
    "        lrs = [1e-2, 1e-3]\n",
    "\n",
    "        for e, h, f in itertools.product(epochs, hidden_layers_arr, features_arr):\n",
    "            print(\"Current Layer Attributes - epochs:{} hidden layers:{} features count:{}\".format(e,h,f))\n",
    "            n = network(2,h,f)\n",
    "            n.build_layers()\n",
    "            Train.train(e, n, h,f, lrs)\n",
    "            \n",
    "        dict1 = {}\n",
    "        dict1_ = {}\n",
    "        dict2 = []\n",
    "        for k, (v1, v2) in Train.predictions.items():\n",
    "            dict1.update({k: v1})\n",
    "            dict2.append(v2)\n",
    "\n",
     "        for k, (v1_, v2) in Train.predictions_.items():\n",
    "            dict1_.update({k: v1_})\n",
    "\n",
    "            \n",
    "        Train.predictions = dict1\n",
    "        Train.predictions_ = dict1_\n",
    "\n",
    "        Train.results = dict2\n",
    "        df_results = pd.DataFrame(Train.results)\n",
    "        temp = df_results.set_index(['no_of_features', 'hidden_layers'])\n",
    "\n",
     "        if not os.path.isfile('dataset/scores/tf_lstm_nsl_kdd-orig_all.pkl'):\n",
     "            past_scores = temp\n",
     "        else:\n",
     "            past_scores = pd.read_pickle(\"dataset/scores/tf_lstm_nsl_kdd-orig_all.pkl\")\n",
     "            past_scores = past_scores.append(temp)\n",
     "\n",
     "        past_scores.to_pickle(\"dataset/scores/tf_lstm_nsl_kdd-orig_all.pkl\")\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T22:50:44.071379Z",
     "start_time": "2017-07-23T22:46:39.822207Z"
    },
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Current Layer Attributes - epochs:10 hidden layers:1 features count:1\n",
      "Initial Accuracy, before training: 0.2640613913536072\n",
      "Step 1 | Training Loss: 0.005933 | Train Accuracy: 0.996984 | Test Accuracy: 0.799902, 0.619325\n",
      "Step 2 | Training Loss: 0.004664 | Train Accuracy: 0.998571 | Test Accuracy: 0.804072, 0.627257\n",
      "Step 3 | Training Loss: 0.001426 | Train Accuracy: 0.999524 | Test Accuracy: 0.862935, 0.739241\n",
      "Step 4 | Training Loss: 0.001532 | Train Accuracy: 0.999921 | Test Accuracy: 0.871895, 0.756287\n",
      "Step 5 | Training Loss: 0.001510 | Train Accuracy: 0.999921 | Test Accuracy: 0.883827, 0.778987\n",
      "Step 6 | Training Loss: 0.001037 | Train Accuracy: 0.999921 | Test Accuracy: 0.885335, 0.781857\n",
      "Step 7 | Training Loss: 0.001712 | Train Accuracy: 1.000000 | Test Accuracy: 0.886888, 0.784810\n",
      "Step 8 | Training Loss: 0.001306 | Train Accuracy: 0.999921 | Test Accuracy: 0.890082, 0.790886\n",
      "Step 9 | Training Loss: 0.001163 | Train Accuracy: 1.000000 | Test Accuracy: 0.896203, 0.802532\n",
      "Step 10 | Training Loss: 0.001658 | Train Accuracy: 1.000000 | Test Accuracy: 0.898909, 0.807679\n",
      "Step 1 | Training Loss: 0.001156 | Train Accuracy: 0.999921 | Test Accuracy: 0.896957, 0.803966\n",
      "Step 2 | Training Loss: 0.001219 | Train Accuracy: 1.000000 | Test Accuracy: 0.895981, 0.802110\n",
      "Step 3 | Training Loss: 0.001398 | Train Accuracy: 1.000000 | Test Accuracy: 0.895759, 0.801688\n",
      "Step 4 | Training Loss: 0.001197 | Train Accuracy: 1.000000 | Test Accuracy: 0.895626, 0.801435\n",
      "Step 5 | Training Loss: 0.001089 | Train Accuracy: 1.000000 | Test Accuracy: 0.895493, 0.801181\n",
      "Step 6 | Training Loss: 0.001146 | Train Accuracy: 1.000000 | Test Accuracy: 0.893896, 0.798143\n",
      "Step 7 | Training Loss: 0.001348 | Train Accuracy: 1.000000 | Test Accuracy: 0.895759, 0.801688\n",
      "Step 8 | Training Loss: 0.001157 | Train Accuracy: 1.000000 | Test Accuracy: 0.895316, 0.800844\n",
      "Step 9 | Training Loss: 0.001277 | Train Accuracy: 1.000000 | Test Accuracy: 0.894473, 0.799241\n",
      "Step 10 | Training Loss: 0.001332 | Train Accuracy: 1.000000 | Test Accuracy: 0.895893, 0.801941\n",
      "Current Layer Attributes - epochs:10 hidden layers:3 features count:1\n",
      "Initial Accuracy, before training: 0.27031582593917847\n",
      "Step 1 | Training Loss: 0.001557 | Train Accuracy: 0.999603 | Test Accuracy: 0.991883, 0.984557\n",
      "Step 2 | Training Loss: 0.000653 | Train Accuracy: 0.999365 | Test Accuracy: 0.981636, 0.965063\n",
      "Step 3 | Training Loss: 0.000653 | Train Accuracy: 0.999444 | Test Accuracy: 0.981946, 0.965654\n",
      "Step 4 | Training Loss: 0.003132 | Train Accuracy: 0.999365 | Test Accuracy: 0.993302, 0.987257\n",
      "Step 5 | Training Loss: 0.002415 | Train Accuracy: 0.999286 | Test Accuracy: 0.994855, 0.990211\n",
      "Step 6 | Training Loss: 0.002414 | Train Accuracy: 0.999286 | Test Accuracy: 0.982213, 0.966160\n",
      "Step 7 | Training Loss: 0.000648 | Train Accuracy: 0.999841 | Test Accuracy: 0.984253, 0.970042\n",
      "Step 8 | Training Loss: 0.000649 | Train Accuracy: 0.999762 | Test Accuracy: 0.984253, 0.970042\n",
      "Step 9 | Training Loss: 0.001530 | Train Accuracy: 1.000000 | Test Accuracy: 0.986604, 0.974515\n",
      "Step 10 | Training Loss: 0.000648 | Train Accuracy: 0.999841 | Test Accuracy: 0.987757, 0.976709\n",
      "Step 1 | Training Loss: 0.000648 | Train Accuracy: 0.999762 | Test Accuracy: 0.987757, 0.976709\n",
      "Step 2 | Training Loss: 0.000648 | Train Accuracy: 0.999365 | Test Accuracy: 0.987580, 0.976371\n",
      "Step 3 | Training Loss: 0.000648 | Train Accuracy: 0.999921 | Test Accuracy: 0.987580, 0.976371\n",
      "Step 4 | Training Loss: 0.000647 | Train Accuracy: 0.999603 | Test Accuracy: 0.987535, 0.976287\n",
      "Step 5 | Training Loss: 0.000647 | Train Accuracy: 0.999921 | Test Accuracy: 0.987535, 0.976287\n",
      "Step 6 | Training Loss: 0.000647 | Train Accuracy: 0.999762 | Test Accuracy: 0.987491, 0.976203\n",
      "Step 7 | Training Loss: 0.000647 | Train Accuracy: 0.999841 | Test Accuracy: 0.987491, 0.976203\n",
      "Step 8 | Training Loss: 0.001530 | Train Accuracy: 0.999603 | Test Accuracy: 0.987048, 0.975359\n",
      "Step 9 | Training Loss: 0.001530 | Train Accuracy: 0.999841 | Test Accuracy: 0.987048, 0.975359\n",
      "Step 10 | Training Loss: 0.000647 | Train Accuracy: 0.999841 | Test Accuracy: 0.987048, 0.975359\n"
     ]
    }
   ],
   "source": [
    "#%%timeit -r 10\n",
    "\n",
    "Hyperparameters.start_training()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T22:50:44.085182Z",
     "start_time": "2017-07-23T22:50:44.072879Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "pd.Panel(Train.predictions).to_pickle(\"dataset/tf_lstm_nsl_kdd_predictions.pkl\")\n",
    "pd.Panel(Train.predictions_).to_pickle(\"dataset/tf_lstm_nsl_kdd_predictions__.pkl\")\n",
    "\n",
    "df_results.to_pickle(\"dataset/tf_lstm_nsl_kdd_scores.pkl\")\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T22:50:44.157438Z",
     "start_time": "2017-07-23T22:50:44.086667Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "import itertools\n",
    "\n",
    "def plot_confusion_matrix(cm, classes,\n",
    "                          normalize=False,\n",
    "                          title='Confusion matrix',\n",
    "                          cmap=plt.cm.Blues):\n",
    "    \"\"\"\n",
    "    This function prints and plots the confusion matrix.\n",
    "    Normalization can be applied by setting `normalize=True`.\n",
    "    \"\"\"\n",
    "    np.set_printoptions(precision=4)\n",
    "\n",
    "    plt.imshow(cm, interpolation='nearest', cmap=cmap)\n",
    "    plt.title(title)\n",
    "    plt.colorbar()\n",
    "    tick_marks = np.arange(len(classes))\n",
    "    plt.xticks(tick_marks, classes, rotation=45)\n",
    "    plt.yticks(tick_marks, classes)\n",
    "\n",
    "    if normalize:\n",
    "        cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]\n",
    "        #print(\"Normalized confusion matrix\")\n",
    "    else:\n",
    "        #print('Confusion matrix, without normalization')\n",
    "        pass\n",
    "    \n",
    "    #print(cm)\n",
    "\n",
    "    label = [[\"\\n True Negative\", \"\\n False Positive \\n Type II Error\"],\n",
    "             [\"\\n False Negative \\n Type I Error\", \"\\n True Positive\"]\n",
    "            ]\n",
    "    \n",
    "    thresh = cm.max() / 2.\n",
    "    for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):\n",
    "        \n",
    "        plt.text(j, i, \"{} {}\".format(cm[i, j].round(4), label[i][j]),\n",
    "                 horizontalalignment=\"center\",\n",
    "                 color=\"white\" if cm[i, j] > thresh else \"black\")\n",
    "\n",
    "    plt.tight_layout()\n",
    "    plt.ylabel('True label')\n",
    "    plt.xlabel('Predicted label')\n",
    "\n",
    "def plot(actual_value, pred_value):\n",
    "    from sklearn.metrics import confusion_matrix\n",
    "\n",
    "    cm_2labels = confusion_matrix(y_pred = pred_value, y_true = actual_value)\n",
    "    plt.figure(figsize=[6,6])\n",
    "    plot_confusion_matrix(cm_2labels, ['Normal', 'Attack'], normalize = False)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T22:51:09.741577Z",
     "start_time": "2017-07-23T22:51:09.737949Z"
    }
   },
   "outputs": [],
   "source": [
    "past_scores = pd.read_pickle(\"dataset/scores/tf_lstm_nsl_kdd-orig_all.pkl\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T22:51:12.836326Z",
     "start_time": "2017-07-23T22:51:12.808961Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th>epoch</th>\n",
       "      <th>train_score</th>\n",
       "      <th>test_score</th>\n",
       "      <th>f1_score</th>\n",
       "      <th>test_score_20</th>\n",
       "      <th>f1_score_20</th>\n",
       "      <th>time_taken</th>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>no_of_features</th>\n",
       "      <th>hidden_layers</th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th rowspan=\"35\" valign=\"top\">1</th>\n",
       "      <th>3</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>24.954079</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>24.954079</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>11</td>\n",
       "      <td>0.999841</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>14.823690</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.999956</td>\n",
       "      <td>0.999961</td>\n",
       "      <td>0.999916</td>\n",
       "      <td>0.999948</td>\n",
       "      <td>26.486510</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.999911</td>\n",
       "      <td>0.999922</td>\n",
       "      <td>0.999831</td>\n",
       "      <td>0.999897</td>\n",
       "      <td>66.155075</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>22</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.999512</td>\n",
       "      <td>0.999571</td>\n",
       "      <td>0.999072</td>\n",
       "      <td>0.999433</td>\n",
       "      <td>79.905635</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.999468</td>\n",
       "      <td>0.999532</td>\n",
       "      <td>0.998987</td>\n",
       "      <td>0.999381</td>\n",
       "      <td>43.296997</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.999068</td>\n",
       "      <td>0.999182</td>\n",
       "      <td>0.998228</td>\n",
       "      <td>0.998918</td>\n",
       "      <td>18.580710</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.999024</td>\n",
       "      <td>0.999144</td>\n",
       "      <td>0.998143</td>\n",
       "      <td>0.998867</td>\n",
       "      <td>12.872745</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.998980</td>\n",
       "      <td>0.999105</td>\n",
       "      <td>0.998059</td>\n",
       "      <td>0.998816</td>\n",
       "      <td>19.107379</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>22</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.998669</td>\n",
       "      <td>0.998832</td>\n",
       "      <td>0.997468</td>\n",
       "      <td>0.998456</td>\n",
       "      <td>134.621442</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.998359</td>\n",
       "      <td>0.998560</td>\n",
       "      <td>0.996878</td>\n",
       "      <td>0.998096</td>\n",
       "      <td>68.964017</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>11</td>\n",
       "      <td>0.999286</td>\n",
       "      <td>0.994855</td>\n",
       "      <td>0.995499</td>\n",
       "      <td>0.990211</td>\n",
       "      <td>0.994051</td>\n",
       "      <td>45.016607</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>11</td>\n",
       "      <td>0.997460</td>\n",
       "      <td>0.988911</td>\n",
       "      <td>0.990164</td>\n",
       "      <td>0.978903</td>\n",
       "      <td>0.986942</td>\n",
       "      <td>63.613685</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.984209</td>\n",
       "      <td>0.986319</td>\n",
       "      <td>0.969958</td>\n",
       "      <td>0.981977</td>\n",
       "      <td>35.610800</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.978708</td>\n",
       "      <td>0.981642</td>\n",
       "      <td>0.959494</td>\n",
       "      <td>0.975850</td>\n",
       "      <td>28.628547</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>11</td>\n",
       "      <td>0.999682</td>\n",
       "      <td>0.931423</td>\n",
       "      <td>0.936965</td>\n",
       "      <td>0.869536</td>\n",
       "      <td>0.915316</td>\n",
       "      <td>21.562394</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>...</th>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>22</td>\n",
       "      <td>0.999682</td>\n",
       "      <td>0.926322</td>\n",
       "      <td>0.931996</td>\n",
       "      <td>0.859831</td>\n",
       "      <td>0.908510</td>\n",
       "      <td>46.670596</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>11</td>\n",
       "      <td>0.999682</td>\n",
       "      <td>0.926189</td>\n",
       "      <td>0.931865</td>\n",
       "      <td>0.859578</td>\n",
       "      <td>0.908330</td>\n",
       "      <td>20.351100</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>11</td>\n",
       "      <td>0.999682</td>\n",
       "      <td>0.926189</td>\n",
       "      <td>0.931865</td>\n",
       "      <td>0.859578</td>\n",
       "      <td>0.908330</td>\n",
       "      <td>20.351100</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>22</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.925701</td>\n",
       "      <td>0.931383</td>\n",
       "      <td>0.858650</td>\n",
       "      <td>0.907668</td>\n",
       "      <td>57.692709</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>11</td>\n",
       "      <td>0.999921</td>\n",
       "      <td>0.924326</td>\n",
       "      <td>0.930299</td>\n",
       "      <td>0.856034</td>\n",
       "      <td>0.906295</td>\n",
       "      <td>28.694453</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>11</td>\n",
       "      <td>0.999762</td>\n",
       "      <td>0.922729</td>\n",
       "      <td>0.929083</td>\n",
       "      <td>0.852996</td>\n",
       "      <td>0.904778</td>\n",
       "      <td>25.245101</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>22</td>\n",
       "      <td>0.999841</td>\n",
       "      <td>0.922951</td>\n",
       "      <td>0.929053</td>\n",
       "      <td>0.853418</td>\n",
       "      <td>0.904629</td>\n",
       "      <td>57.127875</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.920777</td>\n",
       "      <td>0.927357</td>\n",
       "      <td>0.849283</td>\n",
       "      <td>0.902490</td>\n",
       "      <td>25.294052</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>11</td>\n",
       "      <td>0.999762</td>\n",
       "      <td>0.914878</td>\n",
       "      <td>0.922941</td>\n",
       "      <td>0.838059</td>\n",
       "      <td>0.897011</td>\n",
       "      <td>23.061939</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.916696</td>\n",
       "      <td>0.922184</td>\n",
       "      <td>0.841519</td>\n",
       "      <td>0.894872</td>\n",
       "      <td>41.223776</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>22</td>\n",
       "      <td>0.999841</td>\n",
       "      <td>0.913902</td>\n",
       "      <td>0.920843</td>\n",
       "      <td>0.836203</td>\n",
       "      <td>0.893650</td>\n",
       "      <td>53.601731</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>11</td>\n",
       "      <td>0.999841</td>\n",
       "      <td>0.913591</td>\n",
       "      <td>0.920053</td>\n",
       "      <td>0.835612</td>\n",
       "      <td>0.892352</td>\n",
       "      <td>29.535652</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>11</td>\n",
       "      <td>0.999921</td>\n",
       "      <td>0.912349</td>\n",
       "      <td>0.919864</td>\n",
       "      <td>0.833249</td>\n",
       "      <td>0.892539</td>\n",
       "      <td>26.791100</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>22</td>\n",
       "      <td>0.999286</td>\n",
       "      <td>0.907248</td>\n",
       "      <td>0.912879</td>\n",
       "      <td>0.823544</td>\n",
       "      <td>0.882071</td>\n",
       "      <td>37.597928</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>11</td>\n",
       "      <td>0.999921</td>\n",
       "      <td>0.907115</td>\n",
       "      <td>0.912757</td>\n",
       "      <td>0.823291</td>\n",
       "      <td>0.881908</td>\n",
       "      <td>29.174704</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>11</td>\n",
       "      <td>0.999921</td>\n",
       "      <td>0.901969</td>\n",
       "      <td>0.910250</td>\n",
       "      <td>0.813502</td>\n",
       "      <td>0.879590</td>\n",
       "      <td>15.067757</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.898909</td>\n",
       "      <td>0.906831</td>\n",
       "      <td>0.807679</td>\n",
       "      <td>0.874718</td>\n",
       "      <td>31.065185</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>38 rows × 7 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "                              epoch  train_score  test_score  f1_score  \\\n",
       "no_of_features hidden_layers                                             \n",
       "1              3                 11     1.000000    1.000000  1.000000   \n",
       "               3                 11     1.000000    1.000000  1.000000   \n",
       "               3                 11     0.999841    1.000000  1.000000   \n",
       "               3                 11     1.000000    0.999956  0.999961   \n",
       "               3                 11     1.000000    0.999911  0.999922   \n",
       "               3                 22     1.000000    0.999512  0.999571   \n",
       "               3                 11     1.000000    0.999468  0.999532   \n",
       "               3                 11     1.000000    0.999068  0.999182   \n",
       "               3                 11     1.000000    0.999024  0.999144   \n",
       "               3                 11     1.000000    0.998980  0.999105   \n",
       "               3                 22     1.000000    0.998669  0.998832   \n",
       "               3                 11     1.000000    0.998359  0.998560   \n",
       "               3                 11     0.999286    0.994855  0.995499   \n",
       "               3                 11     0.997460    0.988911  0.990164   \n",
       "               3                 11     1.000000    0.984209  0.986319   \n",
       "               3                 11     1.000000    0.978708  0.981642   \n",
       "               1                 11     0.999682    0.931423  0.936965   \n",
       "...                             ...          ...         ...       ...   \n",
       "               1                 22     0.999682    0.926322  0.931996   \n",
       "               1                 11     0.999682    0.926189  0.931865   \n",
       "               1                 11     0.999682    0.926189  0.931865   \n",
       "               1                 22     1.000000    0.925701  0.931383   \n",
       "               1                 11     0.999921    0.924326  0.930299   \n",
       "               1                 11     0.999762    0.922729  0.929083   \n",
       "               1                 22     0.999841    0.922951  0.929053   \n",
       "               1                 11     1.000000    0.920777  0.927357   \n",
       "               1                 11     0.999762    0.914878  0.922941   \n",
       "               1                 11     1.000000    0.916696  0.922184   \n",
       "               1                 22     0.999841    0.913902  0.920843   \n",
       "               1                 11     0.999841    0.913591  0.920053   \n",
       "               1                 11     0.999921    0.912349  0.919864   \n",
       "               1                 22     0.999286    0.907248  0.912879   \n",
       "               1                 11     0.999921    0.907115  0.912757   \n",
       "               1                 11     0.999921    0.901969  0.910250   \n",
       "               1                 11     1.000000    0.898909  0.906831   \n",
       "\n",
       "                              test_score_20  f1_score_20  time_taken  \n",
       "no_of_features hidden_layers                                          \n",
       "1              3                   1.000000     1.000000   24.954079  \n",
       "               3                   1.000000     1.000000   24.954079  \n",
       "               3                   1.000000     1.000000   14.823690  \n",
       "               3                   0.999916     0.999948   26.486510  \n",
       "               3                   0.999831     0.999897   66.155075  \n",
       "               3                   0.999072     0.999433   79.905635  \n",
       "               3                   0.998987     0.999381   43.296997  \n",
       "               3                   0.998228     0.998918   18.580710  \n",
       "               3                   0.998143     0.998867   12.872745  \n",
       "               3                   0.998059     0.998816   19.107379  \n",
       "               3                   0.997468     0.998456  134.621442  \n",
       "               3                   0.996878     0.998096   68.964017  \n",
       "               3                   0.990211     0.994051   45.016607  \n",
       "               3                   0.978903     0.986942   63.613685  \n",
       "               3                   0.969958     0.981977   35.610800  \n",
       "               3                   0.959494     0.975850   28.628547  \n",
       "               1                   0.869536     0.915316   21.562394  \n",
       "...                                     ...          ...         ...  \n",
       "               1                   0.859831     0.908510   46.670596  \n",
       "               1                   0.859578     0.908330   20.351100  \n",
       "               1                   0.859578     0.908330   20.351100  \n",
       "               1                   0.858650     0.907668   57.692709  \n",
       "               1                   0.856034     0.906295   28.694453  \n",
       "               1                   0.852996     0.904778   25.245101  \n",
       "               1                   0.853418     0.904629   57.127875  \n",
       "               1                   0.849283     0.902490   25.294052  \n",
       "               1                   0.838059     0.897011   23.061939  \n",
       "               1                   0.841519     0.894872   41.223776  \n",
       "               1                   0.836203     0.893650   53.601731  \n",
       "               1                   0.835612     0.892352   29.535652  \n",
       "               1                   0.833249     0.892539   26.791100  \n",
       "               1                   0.823544     0.882071   37.597928  \n",
       "               1                   0.823291     0.881908   29.174704  \n",
       "               1                   0.813502     0.879590   15.067757  \n",
       "               1                   0.807679     0.874718   31.065185  \n",
       "\n",
       "[38 rows x 7 columns]"
      ]
     },
     "execution_count": 16,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Rank every recorded run by F1 score, best first.\n",
    "past_scores.sort_values(by='f1_score',ascending=False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T22:51:13.808210Z",
     "start_time": "2017-07-23T22:51:13.791533Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th>epoch</th>\n",
       "      <th>train_score</th>\n",
       "      <th>test_score</th>\n",
       "      <th>f1_score</th>\n",
       "      <th>test_score_20</th>\n",
       "      <th>f1_score_20</th>\n",
       "      <th>time_taken</th>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>no_of_features</th>\n",
       "      <th>hidden_layers</th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th rowspan=\"2\" valign=\"top\">1</th>\n",
       "      <th>3</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>24.954079</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>11</td>\n",
       "      <td>0.999682</td>\n",
       "      <td>0.931423</td>\n",
       "      <td>0.936965</td>\n",
       "      <td>0.869536</td>\n",
       "      <td>0.915316</td>\n",
       "      <td>21.562394</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "                              epoch  train_score  test_score  f1_score  \\\n",
       "no_of_features hidden_layers                                             \n",
       "1              3                 11     1.000000    1.000000  1.000000   \n",
       "               1                 11     0.999682    0.931423  0.936965   \n",
       "\n",
       "                              test_score_20  f1_score_20  time_taken  \n",
       "no_of_features hidden_layers                                          \n",
       "1              3                   1.000000     1.000000   24.954079  \n",
       "               1                   0.869536     0.915316   21.562394  "
      ]
     },
     "execution_count": 17,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Best run per (no_of_features, hidden_layers) combination:\n",
    "# sort by test_score descending first, so groupby().first() picks the top row of each group.\n",
    "psg = past_scores.sort_values(by='test_score', ascending=False).groupby(by=['no_of_features', 'hidden_layers'])\n",
    "psg.first().sort_values(by='test_score', ascending=False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T22:51:14.248102Z",
     "start_time": "2017-07-23T22:51:14.233023Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th>epoch</th>\n",
       "      <th>train_score</th>\n",
       "      <th>test_score</th>\n",
       "      <th>f1_score</th>\n",
       "      <th>test_score_20</th>\n",
       "      <th>f1_score_20</th>\n",
       "      <th>time_taken</th>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>no_of_features</th>\n",
       "      <th>hidden_layers</th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th rowspan=\"2\" valign=\"top\">1</th>\n",
       "      <th>3</th>\n",
       "      <td>12.375</td>\n",
       "      <td>0.999787</td>\n",
       "      <td>0.996227</td>\n",
       "      <td>0.996715</td>\n",
       "      <td>0.992822</td>\n",
       "      <td>0.995664</td>\n",
       "      <td>44.224500</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>15.000</td>\n",
       "      <td>0.999830</td>\n",
       "      <td>0.919354</td>\n",
       "      <td>0.925616</td>\n",
       "      <td>0.846575</td>\n",
       "      <td>0.899947</td>\n",
       "      <td>34.924257</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "                               epoch  train_score  test_score  f1_score  \\\n",
       "no_of_features hidden_layers                                              \n",
       "1              3              12.375     0.999787    0.996227  0.996715   \n",
       "               1              15.000     0.999830    0.919354  0.925616   \n",
       "\n",
       "                              test_score_20  f1_score_20  time_taken  \n",
       "no_of_features hidden_layers                                          \n",
       "1              3                   0.992822     0.995664   44.224500  \n",
       "               1                   0.846575     0.899947   34.924257  "
      ]
     },
     "execution_count": 18,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Average metrics per (no_of_features, hidden_layers) group, ranked by mean test score.\n",
    "psg.mean().sort_values(by='test_score', ascending=False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T22:51:40.768364Z",
     "start_time": "2017-07-23T22:51:40.762561Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Reload the saved per-configuration LSTM prediction tables onto the Train class.\n",
    "# predictions holds the Train+/Test+ scenario; predictions_ the Train+/Test- scenario\n",
    "# (matching the Scenario labels used when scoring below).\n",
    "Train.predictions = pd.read_pickle(\"dataset/tf_lstm_nsl_kdd_predictions.pkl\")\n",
    "Train.predictions_ = pd.read_pickle(\"dataset/tf_lstm_nsl_kdd_predictions__.pkl\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T22:52:53.489137Z",
     "start_time": "2017-07-23T22:52:53.478319Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>Actual</th>\n",
       "      <th>Attack_prob</th>\n",
       "      <th>Normal_prob</th>\n",
       "      <th>Prediction</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>7967</th>\n",
       "      <td>1.0</td>\n",
       "      <td>3.750113e-07</td>\n",
       "      <td>0.964022</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "      Actual   Attack_prob  Normal_prob  Prediction\n",
       "7967     1.0  3.750113e-07     0.964022         1.0"
      ]
     },
     "execution_count": 20,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Key format: \"<epoch>_<no_of_features>_<hidden_layers>\" (original note: epoch_nof_hidden),\n",
    "# so \"11_1_3\" = epoch 11, feature set 1, 3 hidden layers. Show one random prediction row.\n",
    "Train.predictions[\"11_1_3\"].sample()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T22:53:19.599296Z",
     "start_time": "2017-07-23T22:53:19.589238Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>Actual</th>\n",
       "      <th>Attack_prob</th>\n",
       "      <th>Normal_prob</th>\n",
       "      <th>Prediction</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>10347</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.964006</td>\n",
       "      <td>0.000491</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "       Actual  Attack_prob  Normal_prob  Prediction\n",
       "10347     0.0     0.964006     0.000491         0.0"
      ]
     },
     "execution_count": 22,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# One random prediction row from the Test- table for the same \"11_1_3\" configuration\n",
    "# (key format \"<epoch>_<no_of_features>_<hidden_layers>\").\n",
    "Train.predictions_[\"11_1_3\"].sample()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:35:20.050200Z",
     "start_time": "2017-07-23T23:35:20.041952Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Keep only fully populated prediction rows for the \"11_1_3\" configuration before scoring.\n",
    "df = Train.predictions[\"11_1_3\"].dropna()\n",
    "df_ = Train.predictions_[\"11_1_3\"].dropna()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:35:21.018226Z",
     "start_time": "2017-07-23T23:35:21.011425Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "from sklearn import metrics as me\n",
    "def get_score(y_true, y_pred):\n",
    "    \"\"\"Compute binary-classification metrics for one set of predictions.\n",
    "\n",
    "    Parameters\n",
    "    ----------\n",
    "    y_true : array-like of int\n",
    "        Ground-truth labels.\n",
    "    y_pred : array-like of int\n",
    "        Predicted labels.\n",
    "\n",
    "    Returns\n",
    "    -------\n",
    "    dict\n",
    "        Metric name -> float score for F1, precision, recall and accuracy.\n",
    "    \"\"\"\n",
    "    f1 = me.f1_score(y_true, y_pred)\n",
    "    pre = me.precision_score(y_true, y_pred)\n",
    "    rec = me.recall_score(y_true, y_pred)\n",
    "    acc = me.accuracy_score(y_true, y_pred)\n",
    "    return {\"F1 Score\":f1, \"Precision\":pre, \"Recall\":rec, \"Accuracy\":acc}\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:35:21.731093Z",
     "start_time": "2017-07-23T23:35:21.680988Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>Accuracy</th>\n",
       "      <th>F1 Score</th>\n",
       "      <th>Precision</th>\n",
       "      <th>Recall</th>\n",
       "      <th>Scenario</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>0.994855</td>\n",
       "      <td>0.995499</td>\n",
       "      <td>0.991497</td>\n",
       "      <td>0.999532</td>\n",
       "      <td>Train+/Test+</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>0.994855</td>\n",
       "      <td>0.995499</td>\n",
       "      <td>0.991497</td>\n",
       "      <td>0.999532</td>\n",
       "      <td>Train+/Test-</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "   Accuracy  F1 Score  Precision    Recall      Scenario\n",
       "0  0.994855  0.995499   0.991497  0.999532  Train+/Test+\n",
       "1  0.994855  0.995499   0.991497  0.999532  Train+/Test-"
      ]
     },
     "execution_count": 29,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from sklearn import metrics as me\n",
    "\n",
    "# Score the chosen configuration on both evaluation scenarios and tabulate the results.\n",
    "scores = get_score(df.loc[:,'Actual'].values.astype(int),\n",
    "                df.loc[:,'Prediction'].values.astype(int))\n",
    "scores.update({\"Scenario\":\"Train+/Test+\"})\n",
    "score_df = pd.DataFrame(scores, index=[0])\n",
    "\n",
    "scores = get_score(df_.loc[:,'Actual'].values.astype(int),\n",
    "                df_.loc[:,'Prediction'].values.astype(int))\n",
    "scores.update({\"Scenario\":\"Train+/Test-\"})\n",
    "\n",
    "# DataFrame.append was deprecated and removed in pandas 2.0; pd.concat is the\n",
    "# supported way to add rows and produces the same two-row table here.\n",
    "score_df = pd.concat([score_df, pd.DataFrame(scores, index=[1])])\n",
    "\n",
    "score_df"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:35:21.955352Z",
     "start_time": "2017-07-23T23:35:21.949122Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "Actual\n",
       "0.0     9711\n",
       "1.0    12833\n",
       "Name: Actual, dtype: int64"
      ]
     },
     "execution_count": 30,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Class balance of the scored set: number of rows per Actual label (0.0 vs 1.0).\n",
    "df.groupby(by=\"Actual\").Actual.count()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:35:23.114841Z",
     "start_time": "2017-07-23T23:35:22.830939Z"
    }
   },
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAbYAAAGhCAYAAAAN2pFTAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3Xe8j/X/x/HH6wwrO0JGKGTvPRpCIpSRJqV869v6Nmj8\n+raVxrehrUUaSIpCGQ1R9migIikSScke57x+f3yuc/pYxxkf5ziX573bdfO53td6fZDXeb2v9/W+\nzN0REREJi7icDkBERCSWlNhERCRUlNhERCRUlNhERCRUlNhERCRUlNhERCRUlNhERCRUlNhERCRU\nlNhERCRUEnI6ABERiY34wie479kes/P59t8/cvczY3bCbKLEJiISEr5nO3mr9YrZ+XYseqZEzE6W\njZTYRERCw8B0h0m/AyIiEiqq2EREwsIAs5yOIscpsYmIhIm6ItUVKSIi4aKKTUQkTNQVqcQmIhIe\nGhUJ6ooUEZGQUcUmIhIm6opUYhMRCQ1DXZGoK1JEREJGFZuISGiYuiJRYhMRCRd1RaorUkREwkUV\nm4hImKgrUolNRCQ89IA2qCtSRERCRhWbiEhY6LU1gCo2EREJGVVsIiJhontsqthERMIjGDwSq+VQ\nVzN7xczWm9k3UW2PmNkyM/vKzN41s6JR224zs+Vm9p2ZdYhqb2hmXwfbhphF+lPNLK+ZjQraZ5tZ\nxfT8LiixiYhIZg0DztynbQpQy93rAN8DtwGYWQ2gN1AzOOZZM4sPjnkOuAKoEiwp5+wH/OnuJwGP\nAw+lJyglNhGRMImz2C2H4O7TgY37tE129z3B6iygXPC5KzDS3Xe6+0pgOdDEzMoAhd19lrs78BrQ\nLeqY4cHnMUDblGouLbrHJiISFkfe7P6XAaOCz2WJJLoUq4O23cHnfdtTjvkFwN33mNkm4FhgQ1oX\nVWITEZGDKWFm86LWh7r70PQcaGb/B+wB3jgskaVBiU1EJExi+xzbBndvlPEQrC/QGWgbdC8CrAHK\nR+1WLmhbwz/dldHt0cesNrMEoAjwx6Guf0TVrCIikhXZOyrygBGYnQkMBLq4+7aoTeOB3sFIx0pE\nBonMcfe1wN9m1iy4f3YJMC7qmD7B5x7Ax1GJ8qBUsYmISKaY2VvAqUS6LFcDdxEZBZkXmBKM85jl\n7le6+7dmNhpYQqSL8mp3TwpO9W8iIyzzA5OCBeBlYISZLScySKV3uuJKR/ITEZFcIK5wOc/b9NqY\nnW/H1FvnZ6YrMqepYhMRCZMja1RkjtDvgIiIhIoqNhGRsDDT7P6oYhMRkZBRxSYiEia6x6bEJiIS\nKuqKVFekiIiEixKbhJKZ5Tez981sk5m9nYXzXGhmk2MZW04xs9Zm9l1OxyGHU87PPHIkyL2RSyiY\n2QVmNs/MtpjZWjObZGatYnDqHkAp4Fh375nZk7j7G+7ePgbxHFZm5mZ2Ulr7uPvn7l4tu2KSHJIy\nMjIWSy6lxCY5xsxuBJ4AHiCShCoAzwBdYnD6E4Dvo94LdVQLJpAVOSoosUmOMLMiwL1E5osb6+5b\n3X23u3/g7gODffKa2RNm9muwPGFmeYNtp5rZajO7KXg1/VozuzTYdg9wJ3BeUAn2M7O7zez1qOtX\nDKqchGC9r5n9aGabzWylmV0Y1T4j6rgWZjY36OKca2YtorZ9amb3mdnM4DyTzazEQb5/SvwDo+Lv\nZmZnmdn3ZrbRzG6P2r+JmX1pZn8F+z5tZnmCbdOD3RYH3/e8qPPfYma/Aa+mtAXHnBhco0GwfryZ\n/W5mp2bpD1ZyVsr72NQVKZIjmgP5gHfT2Of/gGZAPaAu0AS4I2p7aSKvsShL5BXyz5hZMXe/i0gV\nOMrdC7r7y2kFYmbHAEOAju5eCGgBLDrAfsWBCcG+xwKPARPM7Nio3S4ALgWOA/IAN6dx6dJEfg/K\nEknELwIXAQ2B1sB/g1nQAZKAG4ASRH
7v2hKZOBZ3bxPsUzf4vqOizl+cSPXaP/rC7r4CuAV43cwK\nAK8Cw9390zTilSOe7rGBEpvknGOJvOspra7CC4F73X29u/8O3ANcHLV9d7B9t7tPBLYAmb2HlAzU\nMrP87r7W3b89wD6dgB/cfYS773H3t4BlwNlR+7zq7t+7+3ZgNJGkfDC7gUHuvhsYSSRpPenum4Pr\nLyGS0HH3+e4+K7juT8ALwCnp+E53ufvOIJ69uPuLwHJgNlCGyA8SIrmeEpvklD+IvOoirXs/xwOr\notZXBW2p59gnMW4DCmY0EHffCpwHXAmsNbMJZnZyOuJJials1PpvGYjnj6jXdqQknnVR27enHG9m\nVc3sAzP7zcz+JlKRHrCbM8rv7r7jEPu8CNQCnnL3nYfYV3IDDR5RYpMc8yWwE+iWxj6/EulGS1Eh\naMuMrUCBqPXS0Rvd/SN3b0ekcllG5B/8Q8WTEtOaA+wba88RiauKuxcGbidyRyUtab6TyswKEhm8\n8zJwd9DVKrmduiKV2CRnuPsmIveVngkGTRQws0Qz62hmDwe7vQXcYWYlg0EYdwKvH+ych7AIaGNm\nFYKBK7elbDCzUmbWNbjXtpNIl2byAc4xEagaPKKQYGbnATWADzIZU0YUAv4GtgTV5FX7bF8HVM7g\nOZ8E5rn75UTuHT6f5ShFjgBKbJJj3P1/wI1EBoT8DvwCXAO8F+xyPzAP+Ar4GlgQtGXmWlOAUcG5\n5rN3MooL4viVyFt6T2H/xIG7/wF0Bm4i0pU6EOjs7hsyE1MG3UxkYMpmItXkqH223w0MD0ZN9jrU\nycysK3Am/3zPG4EGKaNBJRdTV6TeoC0iEhZxxSp63lPvOPSO6bTjvSty5Ru0VbGJiEioaDYCEZEw\nycVdiLGiik1EREJFFZuISIiYKjYltvj8RTyh8HE5HYbkYjXLFcnpECSXW7hg/gZ3L5nV8xhKbKDE\nRkLh4yh74ZM5HYbkYjMGd8zpECSXOyZv3L4z2kgWHPWJTUQkNIxDz0dzFFBiExEJDVNXJBoVKSIi\nIaOKTUQkRFSxKbGJiISKEpu6IkVEJGRUsYmIhIgqNiU2EZHw0HB/QF2RIiISMqrYRERCwvQcG6DE\nJiISKkps6ooUEZGQUcUmIhIiqthUsYmISMioYhMRCRFVbEpsIiLhoefYAHVFiohIyKhiExEJEXVF\nKrGJiISGHtCOUFekiIiEiio2EZEQUcWmxCYiEi7Ka+qKFBGRcFHFJiISFqauSFBiExEJFSU2dUWK\niEjIqGITEQkRVWxKbCIioaEHtCPUFSkiIqGixCYiEiYWw+VQlzJ7xczWm9k3UW3FzWyKmf0Q/Fos\natttZrbczL4zsw5R7Q3N7Otg2xALyk4zy2tmo4L22WZWMT2/BUpsIiKSWcOAM/dpuxWY5u5VgGnB\nOmZWA+gN1AyOedbM4oNjngOuAKoES8o5+wF/uvtJwOPAQ+kJSolNRCQsgufYYrUcirtPBzbu09wV\nGB58Hg50i2of6e473X0lsBxoYmZlgMLuPsvdHXhtn2NSzjUGaGvpCEyDR0REQiTGg0dKmNm8qPWh\n7j70EMeUcve1weffgFLB57LArKj9Vgdtu4PP+7anHPMLgLvvMbNNwLHAhrQCUGITEZGD2eDujTJ7\nsLu7mXksA0oPdUWKiIRIdnZFHsS6oHuR4Nf1QfsaoHzUfuWCtjXB533b9zrGzBKAIsAfhwpAiU1E\nJEyycVTkQYwH+gSf+wDjotp7ByMdKxEZJDIn6Lb828yaBffPLtnnmJRz9QA+Du7DpUldkSIikilm\n9hZwKpF7cauBu4DBwGgz6wesAnoBuPu3ZjYaWALsAa5296TgVP8mMsIyPzApWABeBkaY2XIig1R6\npycuJTYRkRDJzplH3P38g2xqe5D9BwGDDtA+D6h1gPYdQM+MxqXEJiISElm8NxYauscmIiKhosQm\nIp
l2Zf/LOKFcKRrVr71X+9h33qZRvVoUzBfPgvnz9tr2yMMPUrt6FerVOpkpkz/KznCPCkfAqMgc\np8QmIpl20cV9ee/9Sfu116hRizdHvUOr1m32al+6dAljRo9i3qJveO/9Sdxw3dUkJSXtd7xknhKb\nEpuIZEGr1m0oXqz4fu0nV69O1WrV9mv/4P1x9Oh1Hnnz5qVipUpUPvEk5s2dkx2hylFEiU1Ess3a\nNWsoV+6fZ3TLlivLr7+uSeMIybCcf44txymxiYhIqGi4v4hkmzJly7J69S+p62tWr+H448umcYRk\nVG6+NxYrqthEJNt06tyFMaNHsXPnTn5auZIVy3+gUeMmOR1WeGTza2uOVEpsIpJpfS6+gNNOacEP\n339HlcrlGf7qywCMH/cuVSqXZ/asLzm3W2e6dIq8N7JGjZp079GThnVr0u3sjjz25NPEx8endQmR\nDLN0zCcZanlLVfGyFz6Z02FILvbN4I45HYLkcsfkjZufldfDpMhXuoqXu2hILEICYMX/zopJXNlN\n99hEREIjd3chxoq6IkVEJFRUsYmIhIgKNiW2o07f1hU5r1l5zGDUrF94dfpPqdsuaXUCF7c8gSR3\nPlmynoc++A6Aq9qeSM+m5UhOdu55dwmff7cBgJs6VuWcRmUpUiCR2rdNzomvI+lQvWolChYslDpI\n44khz9CseYuD7n9c8UKs37g5S9fsf/mlzJj+GYWLFCEuLo7Hn3yaps2aZ+gcE94fz9JlS7h5wK28\nP+49TqpalerVawBw3z130rJVG05ve0aW4gwjdUUqsR1VqpYuyHnNynPOEzPZneQM69+Yj5esZ9WG\nbTQ7qTjtapWi06Mz2JWUzLEF8wBwUqmCdK5fhjMf+pzjiuRlxJVNaPvgZyQ7TFuyntdmrOLj20/J\n4W8mhzJp8seUKFEiW685aPDDnHNuD6ZOmcy1V1/JnPmLM3R8p7O70OnsLgC8//44Op7VKTWx/feu\ne2Mer4SH7rEdRU4sVZDFP//Fjt3JJCU7s1dspEPt0gBc2OIEnp+2gl1JyQD8sWUXAO1qleKDhWvZ\nlZTM6o3bWbVhG3UrFAVg0aq/+H3zzpz5MpIlW7Zs4awOZ9CiaUMaN6jDB+PH7bfP2rVrad/2FJo1\nrk+j+rWZOeNzAKZOmcxpbVrQomlDLjq/F1u2bEnzWq1at+HHFcsBWLx4Eae2bk6ThnXp3fNc/vzz\nTwCefXoIDevWpEnDuvS5KPLuyhGvDePG669h1pdfMPGD8fzfrQNp1rg+P65YQf/LL+XdsWOY/NGH\nXHR+r9RrTf/sU7p3OztTcYaCRboiY7XkVkpsR5Hv126mcaXiFC2QSL7EOE6tXpIyRfMBUKnkMTSu\nXJyx17fgraubUqd8EQBKFcnLr39tTz3Hb5t2ULpIvhyJXzKvY/vTada4Pqe0agZAvnz5GPn2WL6Y\nPZ9Jkz/mtltuZt9Hf0aPfJMz2rVn1tyFzJ63iDp167FhwwYeHjyIDyZN4YvZ86nfsCFPPflYmtee\nOOF9ataKvNbmisv6cN+gwcyZv5iatWrxwP33APC/Rx/iizkLmDN/MU8+/dxexzdr3oKzOndh0OCH\nmTV3IZVPPDF12+ltz2Du3Nls3boVgHfeHkWPXudlKs4wMCAuzmK25FbqijyKrFi/lRc+WcHwfzVh\n+64klq75m+TgH7P4OKNIgUTOffIL6lQowlOX1OeUQZ/mbMASM/t2Rbo7d//3dmbM+Jy4uDh+/XUN\n69ato3Tp0qn7NGzUmKv692P37t107tKNunXrMWP6ZyxbuoS2p7YCYPeuXTRp1uyA1/y/Wwfy0IOD\nKFGiJM++8BKbNm1i06a/aN0m0nV94UV9uOiCSLVVq1YdLutzEZ27dOXsLt3S/b0SEhJo164DEye8\nzznn9uDDDydy/4MPZyhOCR8ltqPM6NmrGT17NQA3n1WV3/7aAUQq
sY++/g2Ar37eRLI7xY/Jw7pN\nOzm+aP7U40sXycdvm3Zkf+ASUyPfeoMNGzYwc9Y8EhMTqV61Ejt37P3n2qp1GyZP+4wPJ03gX5df\nyrXX30CxosU4rW07ho9485DXSLnHlmLTpk0H3XfsuA+Y8fl0Jk54n0cGP8CcBV+l+7v06NWbF557\nhmLFitOgQSMKFSqEu6c7zrDJzV2IsaKuyKNMyqCQ44vmo0Pt0oxb8CsAU75eR7OTjgUi3ZKJ8XFs\n3LqLqd+so3P9MuSJj6Nc8fxULHkMi3/+K8fil9j4e9MmSpYsSWJiIp99+gk/r1q13z4/r1rFcaVK\ncWm/K+h7aT8WLVxA46bNmPXlTFYsj9wz27p1Kz98/326rlmkSBGKFi2Weq/urTdH0Kp1G5KTk1n9\nyy+ccupp3P/AQ2z6e9N+98MKFSzI5s0HHqnZus0pLFq0gGGvvESPXucBZCnO3E5zRapiO+o827cB\nRQsksifZuWvst2zesQeAt+f8wkO96zBpQGt2JyUz4K3IT8w/rNvChEVr+eiW1iQlO3e98y3Jwa2Y\nWzpXo0uD48mfGM/MO09j9OzVPPnRDzn11SQDzjv/Qnqe24XGDerQoGEjqlU7eb99pk//lCcee5TE\nxEQKFizIiy8Pp2TJkrzw4qv0veQCdu6MDBy66+77qFK1arquO/TlYVx/zVVs27aNSpUq8/yLr5CU\nlES/Sy9m06ZNuDtXXX0tRYsW3eu4Hr16c81V/Xnumad4462399oWHx9Px46deH3EcIa+PAwgy3FK\n7qa5IjVXpGSR5oqUrIrVXJH5j6/qJ/V7JhYhAfDN/e1z5VyR6ooUEZFQUVekiEhIGJp5BJTYjmhj\nr29BnoQ4ihZIJG9iHOs2Re4V/OuV+az5c/shjk6/E0oU4JPbT+W/Y77hjS9+BuD+HrWYu3Ij4+b/\nGrPrFCmQSKe6ZXjzy8g1yhTNx21nn8x1IxbF7BpyaKe0asbOnTv588+N7Ni+nTLBG6xHvf0uJ1Ss\nGPPr3XPXHRx7bAmuue4/XNb3Ys45pztnd917SP9lfS9m1hczKVwk8vxkoUKFmPLx9JjHEn65e9BH\nrCixHcHOffILALo3Lkvt8kW4e+ySA+4XZ6QO6Mis3//eyWVtKjFy1i8kZfVkB1G0QCIXtKiQmtjW\n/rVDSS0HfDZjFhCZ2WPh/Hk89uTTORxRxEOPPLZfwou2Z88eEhISDrqe3uMk/HSPLReKjzMWDWrH\nf7tVZ+LNrahboSgz7zyNQvki//PWO6EoI65sAkCBPPE83LsO7/6nBe/f2JLTaxx3wHP+vnknc1du\n5JyGZffbVrFEAYb1b8y4G1oy8upmVCp5TGr72OtbMGlAa27qWJVFg9oBUDBvAq9f1YTxN7Zk4s2t\nUq85sFM1Kh93DB/c1IqBnapxQokCfHBT5AHacTe0TD0vwOhrmlH9+ELpjl+y7pWXhnLbLTenrr/4\nwnPcfusAVixfTqN6tehz0fk0qFODiy84j+3bIz0G8+fNpcMZp9KyWSO6nX0W69ati2lM99x1B5df\n1oe2p7ai/+WXMuyVlzivxzl0bH86XTqdSXJyMrcMuJFG9WvTuEEd3h07BoCPp03lzHan0b3b2TRp\nUCemMR3pNKWWEluuVTh/InNWbOSsR2ewcNXBnyu7tv1JTF/2O+c88QUXPjeb/+tSnTwJB/5jf37a\nCq44rdJ+f6EH9arNne98S9fHZ/LIhO+4+9zIRLR3nVuTFz/9kY6PfM76qDkjd+xO4spXFtDlsZlc\n/Pwc7uhaHYCHJ3zHj+u30vl/M3h4wnd7XeODRb/SqV4ZIPIQeJECiSz9dXOG4pes6dGrN++Pe489\neyKPgIx4bRiX9LkMgKVLl3D1tdez4Ksl5M2Xj5dffIGdO3cy4Kb/8MbIMcycNY/eF1zIfXf/N9PX\nv2XAjTRrXJ9mjetz+WV9Utu/
/24ZEz6cyivDRgCwePFC3hz1DhM/msrYd97mu2XLmD1vEe9PnMwt\nA25k/fr1ACyYP48nhjzDgq8O3NMRVnqO7TB2RZqZA4+5+03B+s1AQXe/+3Bd8wAxDAM+cPcx2XXN\n7LJzTxIffX3on45bVyvJqdWP48q2kfn18ibGUbZYflb+vnW/fX/asI2lv26mc5BgAArlS6D+CUV5\ntm+D1Lb4YA65ehWKctmLcwEYv+BXbuoYeUbIDAZ2rkajSsVI9si9tGLHJKYZ54RFa3mpXyOenrKc\nTvXKMGnxbxmOX7KmcOHCtGzdhskfTqJi5crEx8dzcvXqrFi+nIoVK9GkaWRKqt7nX8irL79I61NO\nZemSb+ncMVKpJyUlUbZsuUxf/2BdkZ3P7kK+fP/MT9r2jHYUK1YMgC9nzqDneb2Jj4+ndOnSNG/R\nigXz55EnTx6aNG1O+QoVMh2P5F6Hs+N5J3CumT3o7hsyerCZJbj7nsMQVyjs3J2813pSsqdOWpo3\nqqIxiww2+fmPbek67zNTlvPExfVSq0Az2LhlF53/NyPdsZ3TqCyF8iVw9mMzSUp2Zt55GnkT4tM8\n5tc/d7B1Z1Lqa3JSHhDPaPySNX0v7cdTTz5OhRNO4OJL+qa27/vTu5nh7tSqXeewD/IoUOCYNNcP\n5phj0rdfqOTyLsRYOZx9OnuAocAN+24ws4pm9rGZfWVm08ysQtA+zMyeN7PZwMNmdreZDTezz81s\nlZmda2YPm9nXZvahmSUGx91pZnPN7BszG2q5uYbOpNUbt1O7XGRE2Zl1/pnIdvqy3+nT+oTU9Rpl\nC6d5nh/WbeHnDds49eSSAPy9fQ+/b95J+9qlgMj/NCcfXwiAxT//lfram7P3qvIS+WPLLpKSnVZV\nS1AmmGty6449HJP34AluwqK1XNX2RPIkxLF83ZZMxS9Z07xFS378cQXvjh1D957npbb/9NNK5s+L\nVOejR71F8xYtqV69Br+uWcO8uXMA2LVrF0uWfJut8bZo1Zoxo0eRnJzMunXrmPXlTBo0zHXPE8dM\nynD/o70r8nDfrHgGuNDMiuzT/hQw3N3rAG8AQ6K2lQNauPuNwfqJwOlAF+B14BN3rw1sBzoF+zzt\n7o3dvRaQH+h8WL7NEezJj37g3u41ee8/Ldid9E81N2TycvLniWfSgNZ8OLA113eocshzPT1lOccX\n+2fi4+teW8gFzSsw4eZWfDSwTeoAjnveXcK/Tq/MxJtbUe7YAmzeHimw35u/hgYVizFpQGs61y/D\nyvWRbsMNW3bxzeq/mTSgNQM7VdvvuhMXr6VLg+OZsGhtluKXrDnn3O60bNWGIkX++d/25JOrM+TJ\nx2lQpwbbt23jssv7kzdvXl4f+Ta3DryJJg3r0qJJA+bOmZ3p60bfY2vWuD5JSUnpiLUHVatVo0nD\nunTu2I7BD/+P447TAKOj3WGbUsvMtrh7QTO7F9hNJBEVdPe7zWwDUMbddwdV11p3LxHcE/vE3YcH\n57gb2O3ug8wsLjhHPnf34Lwb3f0JM+sODAQKAMWBp9x98MHusZlZf6A/QHyhkg0rXD7ssPwehF3+\nPPFs3xX5x6drw+PpULs0/x62IIejyn5hm1Kra+eO3Dzw1tTXy6xYvpwLz+/JrLkLcziy8IrVlFrH\nlK3m1a96PhYhATD/v6fnyim1suPhjieABcCr6dx/31EBOwHcPdnMdvs/mTgZSDCzfMCzQCN3/yVI\nhmm+CdPdhxLpJiVvqSpH92SZWVCnfBH+260GcQabtu9m4Fvpf9WIHHn++OMPTmvdnPoNG6UmNcl9\ncnMXYqwc9sTm7hvNbDTQD3glaP4C6A2MAC4EPs/CJVKS2AYzKwj0AEI3CvJINHvFxgwNKpEj27HH\nHstXS/Z/tcuJJ52kak1ylex6HP9/wDVR69cCr5rZAOB34NLMntjd/zKzF4FvgN+AuVkJVEQkN1
PB\ndhgTm7sXjPq8jsj9r5T1VUQGhOx7TN991u9O45x3R32+A7jjUOcTEQk1U1ckaOYREREJGc0MKiIS\nEpHn2HI6ipynik1EREJFFZuISGjk7hlDYkWJTUQkRJTX1BUpIiIho4pNRCRE1BWpxCYiEh56bQ2g\nrkgREQkZVWwiIiGR8j62o50Sm4hIiCixqStSRERCRhWbiEiIqGBTYhMRCRV1RaorUkREQkaJTUQk\nLILn2GK1pOuSZjeY2bdm9o2ZvWVm+cysuJlNMbMfgl+LRe1/m5ktN7PvzKxDVHtDM/s62DbEslB6\nKrGJiEimmFlZ4DqgkbvXAuKB3sCtwDR3rwJMC9YxsxrB9prAmcCzZhYfnO454AqgSrCcmdm4lNhE\nRELCgtn9Y7WkUwKQ38wSgALAr0BXYHiwfTjQLfjcFRjp7jvdfSWwHGhiZmWAwu4+y90deC3qmAxT\nYhMRCZEYd0WWMLN5UUv/6Gu5+xrgUeBnYC2wyd0nA6XcfW2w229AqeBzWeCXqFOsDtrKBp/3bc8U\njYoUEZGD2eDujQ62Mbh31hWoBPwFvG1mF0Xv4+5uZn54w9ybEpuISIjEZe9w/zOAle7+O4CZjQVa\nAOvMrIy7rw26GdcH+68BykcdXy5oWxN83rc9U9QVKSISItk8KvJnoJmZFQhGMbYFlgLjgT7BPn2A\nccHn8UBvM8trZpWIDBKZE3Rb/m1mzYLzXBJ1TIapYhMRkUxx99lmNgZYAOwBFgJDgYLAaDPrB6wC\negX7f2tmo4Elwf5Xu3tScLp/A8OA/MCkYMkUJTYRkZCIVFrZO/OIu98F3LVP804i1duB9h8EDDpA\n+zygVixiUmITEQmROM2opXtsIiISLqrYRERCRJMgK7GJiISK8pq6IkVEJGRUsYmIhIQRmS/yaKfE\nJiISIhoVqa5IEREJGVVsIiJhkbHXzYSWKjYREQkVVWwiIiGigk2JTUQkNIxsf23NEUldkSIiEiqq\n2EREQkQFmxKbiEioaFSkuiJFRCRkVLGJiIRE5EWjOR1FzlNiExEJEY2KVFekiIiEjCo2EZEQUb2W\nRmIzs8JpHejuf8c+HBERyQqNiky7YvsWcPb+ASBl3YEKhzEuERGRTDloYnP38tkZiIiIZE1kSq2c\njiLnpWvwiJn1NrPbg8/lzKzh4Q1LREQyLHhtTayW3OqQic3MngZOAy4OmrYBzx/OoERERDIrPaMi\nW7h7AzNbCODuG80sz2GOS0REMiEXF1oxk56uyN1mFkdkwAhmdiyQfFijEhERyaT0VGzPAO8AJc3s\nHqAXcM+wwjPxAAAgAElEQVRhjUpERDIlN98bi5VDJjZ3f83M5gNnBE093f2bwxuWiIhklEZFRqR3\n5pF4YDeR7khNwyUiIkes9IyK/D/gLeB4oBzwppnddrgDExGRjNNw//RVbJcA9d19G4CZDQIWAg8e\nzsBERCTjcm86ip30dCuuZe8EmBC0iYiIHHHSmgT5cSL31DYC35rZR8F6e2Bu9oQnIiLpZab3sUHa\nXZEpIx+/BSZEtc86fOGIiEhWKK+lPQnyy9kZiIiISCwccvCImZ0IDAJqAPlS2t296mGMS0REMiE3\nj2aMlfQMHhkGvEpksE1HYDQw6jDGJCIimWQWuyW3Sk9iK+DuHwG4+wp3v4NIghMRETnipOc5tp3B\nJMgrzOxKYA1Q6PCGJSIiGWWYRkWSvsR2A3AMcB2Re21FgMsOZ1AiIiKZlZ5JkGcHHzfzz8tGRUTk\nSJPL743FSloPaL9L8A62A3H3cw9LRCIikmkaFZl2xfZ0tkWRg2qVK8LMh8/K6TAkFyvW+JqcDkFE\noqT1gPa07AxERESyTu8VS//72ERE5AhnqCsSlNxFRCRk0l2xmVled995OIMREZGsiVPBlq43aDcx\ns6+BH4L1umb21GGPTEREMizOYrfkVunpihwCdAb+AHD3xc
BphzMoERGRzEpPV2Scu6/a54Zk0mGK\nR0REMikyeXEuLrViJD2J7RczawK4mcUD1wLfH96wREQkM3JzF2KspKcr8irgRqACsA5oFrSJiIgc\ncQ6Z2Nx9vbv3dvcSwdLb3TdkR3AiIpIx2f0+NjMramZjzGyZmS01s+ZmVtzMppjZD8GvxaL2v83M\nlpvZd2bWIaq9oZl9HWwbYlnoU03PG7Rf5ABzRrp7/8xeVEREYs8gJ15b8yTwobv3MLM8QAHgdmCa\nuw82s1uBW4FbzKwG0BuoCRwPTDWzqu6eBDwHXAHMBiYCZwKTMhNQeroipwLTgmUmcByg59lERI5y\nZlYEaAO8DODuu9z9L6ArMDzYbTjQLfjcFRjp7jvdfSWwHGhiZmWAwu4+y90deC3qmAxLz2trRu3z\nRUYAMzJ7QREROXxiPJ1UCTObF7U+1N2HRq1XAn4HXjWzusB84HqglLuvDfb5DSgVfC4LzIo6fnXQ\ntjv4vG97pmRmrshK/BOkiIiE1wZ3b5TG9gSgAXCtu882syeJdDumcnc3s4O+Au1wSM89tj/55x5b\nHLCRfQIXEZEjQzbfYlsNrI56IfUYIvlhnZmVcfe1QTfj+mD7GqB81PHlgrY1wed92zMlzao1GJVS\nFygZLMXcvbK7j87sBUVE5PAwM+JiuByKu/9G5FnnakFTW2AJMB7oE7T1AcYFn8cDvc0sr5lVAqoA\nc4Juy7/NrFmQdy6JOibD0qzYghJyorvXyuwFREQk1K4F3ghGRP4IXEqkaBptZv2AVUAvAHf/1sxG\nE0l+e4CrgxGRAP8GhgH5iYyGzNSISEjfPbZFZlbf3Rdm9iIiIpI9snu0v7svAg50H67tQfYfBAw6\nQPs8ICZF1EETm5kluPseoD4w18xWAFuJPCrh7t4gFgGIiEjsaEqttCu2OURGu3TJplhERESyLK3E\nZgDuviKbYhERkSzIoZlHjjhpJbaSZnbjwTa6+2OHIR4REckC5bW0E1s8UJCgchMREckN0kpsa939\n3myLREREssY0eATScY9NRERyD9M/3WnOPHLAZxBERESOZAet2Nx9Y3YGIiIiWRMZFZnTUeS8zMzu\nLyIiRygltpi/ukdERCRnqWITEQkR04NsqthERCRcVLGJiISEBo9EKLGJiISFaUotUFekiIiEjCo2\nEZEQ0ez+SmwiIqGhe2wR6ooUEZFQUcUmIhIi6olUYhMRCREjTrP7qytSRETCRRWbiEhIGOqKBFVs\ncgB//fUX55/Xg7q1TqZe7erM+vLLnA5JRNIjeIN2rJbcShWb7OfmG66nffszeWvUGHbt2sW2bdty\nOiQRkXRTYpO9bNq0iRkzpvPiK8MAyJMnD3ny5MnZoEQk3fSAtroiZR8/rVxJiRIl6d/vUpo1qs9V\n/S9n69atOR2WiEi6KbHJXvbs2cOihQu44l9XMWveQgoccwyPPjw4p8MSkXRIGTwSqyW3UmKTvZQt\nV46y5crRpGlTAM7p3oNFCxfkcFQikl5xZjFbcislNtlL6dKlKVeuPN9/9x0An348jZOr18jhqERE\n0k+DR2Q/jz3xFJdeciG7du2iYuXKDH3p1ZwOSUTSKRcXWjGjxCb7qVuvHjNnz8vpMEQkgwx1w4F+\nD0REJGRUsYmIhIWBqS9SiU1EJEyU1pTYjkjVTqpIoYKFiI+PB+CJp56leYsWB92/RNGCbPhrS5au\necVlfZk2bQpLv/+RvHnzsmHDBlo2a8R3y3/K0nn3NX7ce1SpUpXqNSIjLe+9+05atW7D6W3PiOl1\nJHaev+tCOrapxe8bN9Oo5wOp7Q/8pxtntanFrt1JrFy9gf53vc6mLdtJSIjjuTsvpN7J5UmIj+ON\nCXN49JXJ5M+XyBsP96NyuRIkJTsTp3/Nf4eMB+Dhm86lTeOqABTIl4eSxQtSps3AHPm+kvspsR2h\nPpz6CSVKlMjWa8bHxz
P81Vfof+VVh+0a7497j46dOqcmtjvvvvewXUtiY8T7s3h+1Ge8dN8le7VP\nm7WM/z41nqSkZO6/risDLmvPHUPG0f2MBuTNk0DjXg+QP18iC9+5g9GT5vH7n5t54rVpTJ/3A4kJ\n8Ux64Vrat6zB5JlLGPi/sannvar3KdStVi67v2YoGJpSCzR4JNfYsmULHdu3pXnjBjSqV5v3x4/b\nb5+1a9dyxmltaNqwHg3r1WLGjM8BmDplMqe0ak7zxg24oHdPtmw5cHV3zbX/4akhj7Nnz579tj32\nv0do2awxjevX4b577kptf3DQfdSpWY3TT2nFJRedz+OPPQrAKy+9SMtmjWnSoC69e3Vn27ZtfPnF\nF0z4YDy33zqApg3r8eOKFVxxWV/GvjOGyR99yAW9e6aed/pnn3Ju184Zil8Oj5kLVrBx0/4TYU+b\ntYykpGQA5ny9krKligLgOAXy5SE+Po78efOwa3cSm7fuYPuO3Uyf9wMAu/cksWjZL5Q9ruh+5+11\nZkNGfzj/MH6jcLMYLrmVEtsR6swzTqNpw3q0bhGZASRfvnyMGvMuX85dwIdTP+HWgTfh7nsdM2rk\nm7Rr34HZ8xcxZ/5i6tatx4YNGxj8wP1M/GgqX85dQIOGjRjyxGMHvGb5ChVo0aIVb74+Yq/2qVMm\ns+KHH5jx5Rxmz1/EwgXzmfH5dObNnct7Y99hzvzFjPtgEgvm//OIQNdzzmXmrLnMWbCYk0+uzrBX\nXqZ5ixZ06tyFBwY/wuz5i6h84omp+5/e9gzmzpmdOi/lmNGj6Nmrd4bil5xzSdfmfDRzCQBjpy5k\n245drJwyiO8n3csTr03jz7/3ToxFCubnrDa1+WTOd3u1VyhTjBOOP5ZP5+7dLpIR6oo8Qu3bFenu\n3HnH7cz8fDpxcXH8umYN69ato3Tp0qn7NGrUmH9dcRm7d+/m7C7dqFuvHp9P/4xlS5dwepuWAOza\nvYumTZsf9LoDbrmNnt27cuZZnVLbpk6ZzNSpk2nWqD4AW7ZuYfkPP7B582Y6d+lKvnz5yJcvH2d1\nOjv1mCXffsPdd97Bpr/+YsvWLbRr1yHN75uQkED79mcy4YP3Obd7DyZNmsCgwQ9nOH7JfgP7dSAp\nKZmRE+cC0LhmRZKSkqnc/v8oVqgAU1+5gY9nL+OnNX8AEB8fx/DBfXn2rU9T21L07NCQ96YtIjnZ\n97uOpI96IpXYco2Rb77Bhg2/88Wc+SQmJlLtpIrs3LFjr31atW7DlI+n8+HECfTv15fr/nMjRYsV\n4/Qz2vHa62+l6zonValCnbr1eOft0alt7s6Agbdxef9/7bXvU08+cdDzXNGvL6PHvEedunUZMXwY\n0z/79JDX7nleb5579mmKFy9Og4aNKFSoEO6eofgle110dlPOalOLjv8aktrWq2MjJn+xhD17kvn9\nzy18uehHGtaokJrEnrnjfFb8/DtPv/npfufr0aEhNwwevV+7pJdpuD/qisw1Nm3aRMmSx5GYmMhn\nn37Cz6tW7bfPqlWrKFWqFJddfgV9L7uchQsX0KRpM778YiYrli8HYOvWrfzw/fdpXuuWW/+PJx5/\nNHW9XfsODB/2Suq9rTVr1rB+/Xqat2jJxA/eZ8eOHWzZsoVJEz9IPWbL5s2ULlOG3bt3M/KtN1Lb\nCxYqxJbNmw943dZtTmHRwgW88vKL9OzVGyBT8Uv2aNeiOjf2PYMe/3mB7Tt2p7av/m0jpzauBkRG\nODapU5HvfloHwF3/7kyRQvm5+ZF39jtf1YqlKFa4ALMWr8yeLyChpYotl+h9wYV073Y2jerVpkHD\nRlQ7+eT99vn8s095/LFHSExI5JiCBXn51dcoWbIkL748jEsuOp9dO3cCcNe991OlatWDXqtGzZrU\nq98gdVb/M9q1Z9nSpZzaKtIFeEzBgrw6/HUaNW5Mp7O70LhBHY47rhQ1a9WmSOEiANx5
9320admU\nEiVK0rhJ09Rk1rNXb66+6gqefXoIb44as9d14+Pj6XhWZ15/bRgvvTIcIFPxS2wNf7AvrRtWoUTR\ngiz/8D7ue34iw9/7ksdv6UXePAl88Nw1AMz5+ieuGzSS50dNZ+g9FzF/zP9hBiPGzeKbH36l7HFF\nufWKM1n24298+dYtADw/6jOGvfslEOmGfPsjDRrJCk2pFWH7DkA42jRs2Mg1L2LmbdmyhYIFC7Jt\n2zbandaGp58bSv0GDXI6rGxVrPE1OR2C5HI7Fj0z390bZfU8J9ao6w+8MTEWIQHQu0G5mMSV3VSx\nSZZcfVV/li1Zwo6dO7jo4j5HXVITOdLoHpsSm2TR8BFv5nQIIhJFaU2JLVdq3aIpu3buZOOfG9mx\nfTvHH18WgNHvvMcJFSvG/Hp333kHxx5bgmuv/89+7a8Nf5WSJUqmtk399HMKFSoU8xgk66a/djN5\n8iRQvHAB8uVL5Nf1mwDodcNQfl67MWbXqVy+BPNG3873q9aTJzGez+b+kKmRjuOfuZoLBrxEYkI8\n3ds34KUxMwAoV6ooD95wDhffqvcEyoEpseVCn38xG4ARw4cxf/48nhjydI7FcsONA/ZLeNH27NlD\nQkLCQdcPxt1xd+LidCs8VtpcEhnpetHZTWlYowI3PPT2AfeLi7MsP0f2/ar1NOs9mISEOCa/eD2d\nTqnNhM++ztA5ulz9DBBJlJf3aJWa2Fav+0tJ7WA0uz+gATSh8vKLQ7l14M2p60Off47bbhnAiuXL\naVC3Jhdf2Jt6tatz4fm92L59OwDz5s6l3emn0KJJQ7p27si6deuyHMerL79Ez+7d6HDGaZx9Vgc+\nnjaV9m1P5dyunWlUvzYA/3v0YRrWq0XDerV49umnAFixfDn169Sg78UX0qBuTdauXZvlWOTQ4uPj\nWDv9YR65uTtzRt1G41oVWf7hfRQpmB+AJrUrMuH5yACZY/LnYeg9F/H5iJv58q1bOKtNrTTPvWdP\nMrO/+okTy5fEzHjopnOZ9/btzB19O+ecUQ+A40sWYdorNzBr5K3Me/t2mtWtBJAaw/3XdaXqCccx\na+St3HddFyqXL8GskbcCMOONgVQ54bjU60175QbqVC2b4TjDImVUZKyW3EoVW4j0PK83zRrX5/4H\nBpOQkMBrw19NHTa/dMkSnnvhZZo2a0a/vpfw0tAX6H/lVdx84/WMeXc8JUqU4K033+Deu/7LM88P\nTfc1H3/sEV5/bRgAx5YowcSPpgKweNFCZs9bRLFixfh42lQWzJ/Hgq+WUKFCBebMns2oN99gxpdz\n2bNnD61bNKHNKaeSP39+vlu2jJdeeY2GjXLdQKxcrWihAsxYsJwBj+7/fFm02/t3ZMoXS+l/1+sU\nLZSf6SMGMG3WMnbu2n9+UYg8x3ZK46rc8eQ4urerT7VKpWhy3oOULFaQGa8PZMb85ZzfqTETp3/N\n/4ZNJS7OyJ83ca9z3DFkHJXLl6RZ78FApIJL8c5H8+nevgGDX/yQsscVpViRAnz1/RoGXd81Q3FK\nuGR7YjOzbsC7QHV3X2ZmFYEW7v5msL0ecLy7Z2rMqpn9BDRy9w2xiTj3KFy4MK1ateGjDydRqVJl\n4uPjObl6dVYsX07FSpVo2qwZAOdfeBEvvzSUNqecytIl39KpQ+SVMUlJSZQtl7FZ1Q/WFXnGGe0p\nVqxY6nrTZs2pUKECAF98MYNu53Ynf/5IRXB2l27MnPE5Z7RrT+UTT1RSywE7d+1m3MeLD7lf2+bV\nad+yJjdd2g6AfHkSKF+6OMt/Xr/XfikVVnKyM/6TxXw8exmP3dKT0R/OJznZWffHZr5YtIIGNSsw\n79ufefqO3uTNk8j7n37F19+vSXfc70xZwJgnrmTwix/So0MDxk5ZmKE4w0hdkTlTsZ0PzAh+vQuo\nCFwApAyvqwc0AmL3MMZRpO9llzPkycc44YSKXNLn
0tT2ff+ymxnuTq3adZj26ecxj6PAMcekuX4w\nxxRI334SW9t37t5rfU9SMnFxkb8zefP8U0GZQa8bh7Jyddo/N6bcY0uPz+Z+T4fLn+TM1rV46b6L\neXzYVEZOSt+zpT+v/ZOt23dycuXS9GjfgCvuej1DcYaR0lo2d6OaWUGgFdAP6B00DwZam9kiM7sF\nuBc4L1g/z8yamNmXZrbQzL4ws2rBueLN7FEz+8bMvjKza/e5Vn4zm2RmV2TjV8xxLVq2ZOWKFYx9\n52169Dovtf2nlSuZNzcySe2ot96kRYtWVK9Rg19/XcPcOXMA2LVrF0u+/fawx9iyZWvGv/cu27dv\nZ8uWLXzw/jhatmp92K8r6bfq143Urx6psFPuhQFM/WIp/+59Sup6Rt6bNnPBcnp2aIiZcVzxQjSv\nW5kF3/5MhTLF+O2Pv3ll7ExGjJtF3ZPL73Xclq07KVQg70HPO+ajBQy4tD158iSw7Mffshyn5H7Z\nXbF1BT509+/N7A8zawjcCtzs7p0BzGwdka7Ea4L1wkBrd99jZmcADwDdgf5Eqr16wbbiUdcpCIwE\nXnP317Lryx0pzuneg++WLaNIkSKpbSdXr86QJx/jq8WLqFmrNv2u6E/evHl5c+QYbrrhOjb//TdJ\nyUlc/5+bqFGzZrqvFX2PDWDMe+8f8pjGTZrQs/f5tGreGIAr+l9Frdq1U+eDlJx3//MTefbO89m0\neTszFvzz5zLohUk8MqA7c0ffTlycseKX3+l1Q/ruyY6duogmdSoxd/RtuMMtj43l9z+3cEnXZlx3\n0ens3pPElm076XfH8L2OW79xMwuX/sLc0bfz4YxvePXdL/Y570Ieuulc7n1uQkzizO1yoifSzOKB\necAad+8c/Hs8isi/0T8Bvdz9z2Df24gUN0nAde7+UdDeEBgG5CfSY3e9Z3JqrGydUsvMPgCedPcp\nZnYdUAH4gL0TW1/2TmzlgSFAFcCBRHc/2czeAZ539yn7XOMnYBPwsLu/wQGYWX8iiZHyFSo0/H7F\n/hMK52ZdOp3JgFtuo3WbyE+sK5Yv54LzejB7/qIcjiycNKWWZFWsptSqUrOuPzZycixCAqBLndLp\nisvMbiRyC6lwkNgeBja6+2AzuxUo5u63mFkN4C2gCXA8MBWo6u5JZjYHuA6YTSSxDXH3SZmJO9u6\nIoMMfjrwUpB8BgC9OHSX8H3AJ+5eCzgbyJeOy80EzrSD3EV196Hu3sjdG0U/XJzb/fHHH9SqXoWi\nxYqlJjURkcPJzMoBnYCXopq7Aiml93CgW1T7SHff6e4rgeVAEzMrQyQpzgqqtNeijsmw7LzH1gMY\n4e4nuHtFdy8PrASSgeipKjbvs14ESBkm1TeqfQrwLzNLgNTEmeJO4E/gmZh+gyPcscceyzdLf9jv\n3WUnnnSSqjWRo4RZ7BaghJnNi1r6H+CSTwADifxbnqKUu6c8iPobUCr4XBb4JWq/1UFb2eDzvu2Z\nkp2J7Xwiw/yjvUNkEEmSmS02sxuAT4AaKYNHgIeBB81sIXvfE3wJ+Bn4yswWExlZGe16IH9QEouI\nHAUspv8BG1J6t4JlrxuVZtYZWO/uB33fUFCBZetrZLJt8Ii7n3aAtiEH2hdovM969Mu37giO3QPc\nGCzR56wYtXopIiJyuLQEupjZWURuExU2s9eBdWZWxt3XBt2MKQ8QrgGih72WC9rWBJ/3bc+U3Dxr\nioiI7CPGXZFpcvfb3L1cUFD0Bj5294uA8UCfYLc+wLjg83igt5nlNbNKRAYFzgm6Lf82s2bB2IhL\noo7JME2pJSIisTYYGG1m/YBVRAYK4u7fmtloYAmwB7ja3ZOCY/7NP8P9JwVLpiixiYiERGQS5JyZ\ne8TdPwU+DT7/AbQ9yH6DgEEHaJ8HxGS2aiU2EZGwSGcXYtjpHpuIiISKKjYRkRBRxabEJiISKqb5\n/dUVKSIi4aKK
TUQkJAyIU8GmxCYiEibqilRXpIiIhIwqNhGRENGoSCU2EZFQUVekuiJFRCRkVLGJ\niISERkVGqGITEZFQUcUmIhIapntsKLGJiISHZvcH1BUpIiIho4pNRCREVLApsYmIhEZkVKRSm7oi\nRUQkVFSxiYiEiOo1JTYRkXBRZlNXpIiIhIsqNhGRENED2kpsIiKhokGR6ooUEZGQUcUmIhIiKtiU\n2EREwkWZTV2RIiISLqrYRERCwtCoSFDFJiIiIaOKTUQkLPQ+NkCJTUQkVJTX1BUpIiIho4pNRCRM\nVLIpsYmIhIdpVCTqihQRkZBRxSYiEiIaFanEJiISGoZusYG6IkVEJGRUsYmIhIlKNiU2EZEw0ahI\ndUWKiEjIqGITEQkRjYpUYhMRCRXlNXVFiohIyKhiExEJCz3IBqhiExGRkFHFJiISIhrur8QmIhIa\nhkZFgroiRUQkZFSxiYiEiAo2JTYRkXBRZlNXpIiIZI6ZlTezT8xsiZl9a2bXB+3FzWyKmf0Q/Fos\n6pjbzGy5mX1nZh2i2hua2dfBtiFmmb9bqMQmIhIiFsP/0mEPcJO71wCaAVebWQ3gVmCau1cBpgXr\nBNt6AzWBM4FnzSw+ONdzwBVAlWA5M7O/B0psIiIhYha75VDcfa27Lwg+bwaWAmWBrsDwYLfhQLfg\nc1dgpLvvdPeVwHKgiZmVAQq7+yx3d+C1qGMyTIlNRESyzMwqAvWB2UApd18bbPoNKBV8Lgv8EnXY\n6qCtbPB53/ZM0eAREZEQifHYkRJmNi9qfai7D93vmmYFgXeA/7j739G3x9zdzcxjG1balNhERMIk\ntpltg7s3SvNyZolEktob7j42aF5nZmXcfW3Qzbg+aF8DlI86vFzQtib4vG97pqgrUkREMiUYufgy\nsNTdH4vaNB7oE3zuA4yLau9tZnnNrBKRQSJzgm7Lv82sWXDOS6KOyTBVbCIiIRGZ3D9bH2RrCVwM\nfG1mi4K224HBwGgz6wesAnoBuPu3ZjYaWEJkROXV7p4UHPdvYBiQH5gULJmixCYiIpni7jM4eOdn\n24McMwgYdID2eUCtWMSlxCYiEhbpHKYfdkpsIiIhorymwSMiIhIyR33FtmDB/A35E21VTsdxhCsB\nbMjpICRX09+htJ0QszOpZFNic/eSOR3Dkc7M5h3qWRaRtOjvUHZJ9xyPoaauSBERCZWjvmITEQkT\njYpUYpP02W9uOJEM0t+hbGDoFhuoK1LS4UCTnopkhP4OSXZSxSYiEiYq2ZTYRETCRKMi1RUpIiIh\no4pNssTMqgNlgM/dfXdOxyO5h5mZu2frCyiPBhoVqcQmWdebyIsDk8zsCyU3Sa+UpGZmzYCf3P23\nHA4pFJTX1BUpWXcP8BNwHtAqeJuuyEGZWX0zyxN8PpHIK0z25GxUEiZKbJJhwRtuAXD3ZCL/MK1F\nyU3S527g/SC5rQQ2AbsAzCzOzOJzMLbcLXhtTayW3EqJTTIk+r6ImbU3s1OBosD9wM9EklsLJTfZ\nl5nFAbh7V+BPYDRQkEjFXyDYlgzkyaEQJSR0j00yJCqp3QicQ+QV71cAL7n7A2Z2C9AfSAJm5Fig\nckQJfiBKDj6XdPfeZjYO+JLI35UyZpYEJAJrzew2d9+egyHnYrm41IoRJTbJMDM7AzjN3Vub2YNA\nE+B8M8PdHzKzG4DlORulHEmifiC6DmhkZle5e1czex5oCzwMxBOp/r9TUsscI3d3IcaKEpsc0gGG\nZf8CXGtmfYHGwFnA48DdZpbo7o/nQJhyhDOzc4A+QGd33wrg7lea2dvAfUA3d9cgEsky3WOTNO1z\nT62pmRUDVrr7T0AV4Dl3Xwt8BSwGFuVYsHKkqwyMd/e1ZpaYch/W3XsC64DjczS6kLAYLrmVKjZJ\nU1RSuxIYAHwLTDazkcA3wHAzawCcS+Qn8fU5FqwcMQ7y8PUaoLWZFXb3v4P9ev
H/7d17rNd1Hcfx\n50vUVCBoOrVlhTe8C3k0b+WYIWKJOadNwAvJvOB0aaW5sFZbLZurpcNLamWtImp5zYzUlhqBYiRq\nBZiyyjIFywtesvTVH5/PseNP0IOc+J3f9/d6sN845/f7nu/nc9jhvH+f2/sNj9ievt472VCZikxg\nizVoGaltCexJWUvbGzgEmA7MomzV3hc4yvZDbepuDCItPztHAc8Aq4CfA1OBkyQtpaynzQQmtauv\n0UwJbPEaLb+YzgC2Bnaz/QQwt27bHg+cC1xk+6ft620MNi0bRaZQarGdC5xO2TF7BuVN0ibAZNvL\n29TVRkoS5KyxxWq0vNs+Ebgb2EbSnPr6zcAdlK3Z+V8UryHpPcCHgXHANsDjwFXAvrZn2p4CnGD7\n/vb1sqGyyJbAFv/TN6OIpB7KtNEVtm8AdgBGS5oNYPt64It1FBddTtLImh4LSXsCzwOTKcHtENsH\nAVcCcyQdB2B7Vbv6G82WqcgAXjP9eDSwCyU7xDhJd9teXDeJPCzpatvTerdsR3eTtCEwGjhc0tuB\nLcFDe+wAAAbXSURBVICptp+ru2i/Xy/9B/BVYEF7etodOnigNWAS2AJ41fTjRMpayKGU4HYccISk\nl+u00baStm1fT2MwqW+I/lM3g3wa2B841/Zz9ZINgUMl7UTZJDLO9l/a1N3G6/QcjwMlU5Hxipr3\ncQaw0Pa/bd8HXA8MBaZI2g0gi/0BUEdjE+unoyk5Hy8B9pI0CcD2LOAayhnHIxLUYn3IiK2Lreas\n0XJKlv7tJI2xvdj2vHqQ9mDKIdqIXhsBB0r6LIDt/SVtQdkJOUnSk5Q0WS8Cs3tzRcb/V3ZFJrB1\nrZY1tUmUelhPAmcCFwHH9E4/2v6lpLuSvy8AJG1t+++2H5f0GLArZVSG7ZWSbqT8PH0KGAN8IEFt\nPUpcy1Rkt5N0OqVY6PuAbwJn18dIYJqkXQES1AJA0s7A3yR9TdIU4HLKzscVki6tb5iWA7cAJwH7\n2V7Wxi5HF0pg6zKS3iVpqG3XjCIfoexgmwkcAJwGHEMpHjqEcv4ootcq4NeUKevpwGXACGAu8DQw\nS9LxlDdHT9v+a7s62q1yjC2BratI2gr4BDBD0rCa13EltXqx7X8CZwF71MTG59he2bYOx6Bj+xHK\ngf29KDtnbwOOp2TnvxHYHJgGzLL9Qpu6GV0uga27rAAWUrKof7QeyP4j8IN6Fgng3ZQsI0Mo6yQR\nwKsO8J8HmHJe7VGgB7ifsj77CHCi7d+3pZPxypb/gXh0qmwe6QKSdgQ2sL1U0vcoiYsPA062fZ6k\ny4A7JN1HSWg81fZLbexyDEJ1+rr3192DwFcoQe1s29fV9bfH6sg/2kLZFUkCW+NJ2hxYCqyU9Hng\nJUpS2hHADpJOtT1D0r6UpLRfzjm1WJO6k/ZFSd8FbgcusX1dfW1JWzsXUSWwNZztJySNB26lTD2P\nAeZQNgG8COxR34V/y/a/2tfT6CR19H8eMErSZn0yjUQbic6eQhwoCWxdwPYvJB0KXEwJbFtRDlwf\nSykfshMwG0hgi7WxgFJgNmJQSWDrErZvkfRJStXr/Wx/W9INlOwRm9l+qr09jE5je4mkYzNai8Em\nga2L2L5J0svAAkn7p+RMrKsEtcEnU5EJbF3H9s2SNgZuldSTVEcRzZJdkTnH1pVqkdD3J6hFRBNl\nxNalUr04ooE6/GD1QElgi4hoiE7P8ThQMhUZERGNkhFbRESTZMiWEVt0HkkvSbpX0gOSfiRps3W4\n1zhJP6kfH1Gzaazp2pG1ft3atvG5eoawX8+3XHO1pKPXoq1Rkh5Y2z5GNEkCW3Si522Ptb07JS3Y\naX1fVLHWP9u2b7B9wetcMhJY68AWsT5pAP90qgS26HR3UpI5j5K0VNJ3KNlV3ilpgqT5khbVkd0w\nAEkTJS2RtIg+KaEkTZM0q368laRrJS2ujw
OAC4Dt62jxwnrdOZIWSrqvJpnuvddMScsk/YqSsux1\nSTq53mexpB+3jELHS7qn3u/wev0QSRf2afvUdf2HjGZI2ZoEtuhgtYbcYZRaYAA7Apfa3g14Fjgf\nGG97L+Ae4OOSNgGuBCZRSq5svYbbXwzcbnsMpajm7yh1yB6qo8VzJE2obb4XGAv0SDpIUg8lD+dY\n4IPAPv34dq6xvU9t7w+U6tS9RtU2PgRcXr+H6cBTtvep9z9Z0rb9aCei8bJ5JDrRppLurR/fCXyD\nUjz1T7YX1Of3A3YF5tUSYhsD84GdgeW2HwSo5VdOWU0bBwMnANTadE9JelvLNRPq47f182GUQDcc\nuLY33VTNyflGdpf0Bcp05zBgbp/XflgP0z8o6eH6PUwA9uyz/jaitr2sH21Fg3XwQGvAJLBFJ3re\n9ti+T9Tg9Wzfp4BbbE9uue5VX7eOBHzJ9tdb2jjrTdzrauBI24slTQPG9XnNLde6tn2m7b4BEEmj\n3kTb0SSJbJmKjMZaABwoaQcASUMljQaWUGqIbV+vm7yGr78NmFG/doikEcAzlNFYr7nASX3W7t4h\naUvgDuBISZtKGk6Z9nwjw4FHJW0ETG157RhJG9Q+b0cpHDsXmFGvR9JoSUP70U5E42XEFo1ke0Ud\n+cyW9Jb69Pm2l0k6BbhJ0nOUqczhq7nFx4ArJE2nVB2fYXu+pHl1O/3NdZ1tF2B+HTGuAo6zvUjS\nHGAx8DiwsB9d/gxwF7Ci/t23T38G7gbeCpxm+wVJV1HW3hbVQrErgCP7968TTdbJuxkHikql94iI\n6HQ9PXt73l33DNj9Nt1Iv7G994DdcD1JYIuIaAhJPwO2GMBbrrQ9cQDvt14ksEVERKNk80hERDRK\nAltERDRKAltERDRKAltERDRKAltERDRKAltERDRKAltERDRKAltERDRKAltERDTKfwFfJE/Zxf2V\nxgAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<matplotlib.figure.Figure at 0x7f56683e2438>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "plot(actual_value = df.loc[:,'Actual'].values.astype(int),\n",
    "     pred_value = df.loc[:,'Prediction'].values.astype(int))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 32,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:35:24.027709Z",
     "start_time": "2017-07-23T23:35:24.021275Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "Actual\n",
       "0.0     9711\n",
       "1.0    12833\n",
       "Name: Actual, dtype: int64"
      ]
     },
     "execution_count": 32,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "df_.groupby(by=\"Actual\").Actual.count()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 33,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:35:27.120778Z",
     "start_time": "2017-07-23T23:35:26.843239Z"
    }
   },
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAbYAAAGhCAYAAAAN2pFTAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3Xe8j/X/x/HH6wwrO0JGKGTvPRpCIpSRJqV869v6Nmj8\n+raVxrehrUUaSIpCGQ1R9migIikSScke57x+f3yuc/pYxxkf5ziX573bdfO53td6fZDXeb2v9/W+\nzN0REREJi7icDkBERCSWlNhERCRUlNhERCRUlNhERCRUlNhERCRUlNhERCRUlNhERCRUlNhERCRU\nlNhERCRUEnI6ABERiY34wie479kes/P59t8/cvczY3bCbKLEJiISEr5nO3mr9YrZ+XYseqZEzE6W\njZTYRERCw8B0h0m/AyIiEiqq2EREwsIAs5yOIscpsYmIhIm6ItUVKSIi4aKKTUQkTNQVqcQmIhIe\nGhUJ6ooUEZGQUcUmIhIm6opUYhMRCQ1DXZGoK1JEREJGFZuISGiYuiJRYhMRCRd1RaorUkREwkUV\nm4hImKgrUolNRCQ89IA2qCtSRERCRhWbiEhY6LU1gCo2EREJGVVsIiJhontsqthERMIjGDwSq+VQ\nVzN7xczWm9k3UW2PmNkyM/vKzN41s6JR224zs+Vm9p2ZdYhqb2hmXwfbhphF+lPNLK+ZjQraZ5tZ\nxfT8LiixiYhIZg0DztynbQpQy93rAN8DtwGYWQ2gN1AzOOZZM4sPjnkOuAKoEiwp5+wH/OnuJwGP\nAw+lJyglNhGRMImz2C2H4O7TgY37tE129z3B6iygXPC5KzDS3Xe6+0pgOdDEzMoAhd19lrs78BrQ\nLeqY4cHnMUDblGouLbrHJiISFkfe7P6XAaOCz2WJJLoUq4O23cHnfdtTjvkFwN33mNkm4FhgQ1oX\nVWITEZGDKWFm86LWh7r70PQcaGb/B+wB3jgskaVBiU1EJExi+xzbBndvlPEQrC/QGWgbdC8CrAHK\nR+1WLmhbwz/dldHt0cesNrMEoAjwx6Guf0TVrCIikhXZOyrygBGYnQkMBLq4+7aoTeOB3sFIx0pE\nBonMcfe1wN9m1iy4f3YJMC7qmD7B5x7Ax1GJ8qBUsYmISKaY2VvAqUS6LFcDdxEZBZkXmBKM85jl\n7le6+7dmNhpYQqSL8mp3TwpO9W8iIyzzA5OCBeBlYISZLScySKV3uuJKR/ITEZFcIK5wOc/b9NqY\nnW/H1FvnZ6YrMqepYhMRCZMja1RkjtDvgIiIhIoqNhGRsDDT7P6oYhMRkZBRxSYiEia6x6bEJiIS\nKuqKVFekiIiEixKbhJKZ5Tez981sk5m9nYXzXGhmk2MZW04xs9Zm9l1OxyGHU87PPHIkyL2RSyiY\n2QVmNs/MtpjZWjObZGatYnDqHkAp4Fh375nZk7j7G+7ePgbxHFZm5mZ2Ulr7uPvn7l4tu2KSHJIy\nMjIWSy6lxCY5xsxuBJ4AHiCShCoAzwBdYnD6E4Dvo94LdVQLJpAVOSoosUmOMLMiwL1E5osb6+5b\n3X23u3/g7gODffKa2RNm9muwPGFmeYNtp5rZajO7KXg1/VozuzTYdg9wJ3BeUAn2M7O7zez1qOtX\nDKqchGC9r5n9aGabzWylmV0Y1T4j6rgWZjY36OKca2YtorZ9amb3mdnM4DyTzazEQb5/SvwDo+Lv\nZmZnmdn3ZrbRzG6P2r+JmX1pZn8F+z5tZnmCbdOD3RYH3/e8qPPfYma/Aa+mtAXHnBhco0GwfryZ\n/W5mp2bpD1ZyVsr72NQVKZIjmgP5gHfT2Of/gGZAPaAu0AS4I2p7aSKvsShL5BXyz5hZMXe/i0gV\nOMrdC7r7y2kFYmbHAEOAju5eCGgBLDrAfsWBCcG+xwKPARPM7Nio3S4ALgWOA/IAN6dx6dJEfg/K\nEknELwIXAQ2B1sB/g1nQAZKAG4ASRH
7v2hKZOBZ3bxPsUzf4vqOizl+cSPXaP/rC7r4CuAV43cwK\nAK8Cw9390zTilSOe7rGBEpvknGOJvOspra7CC4F73X29u/8O3ANcHLV9d7B9t7tPBLYAmb2HlAzU\nMrP87r7W3b89wD6dgB/cfYS773H3t4BlwNlR+7zq7t+7+3ZgNJGkfDC7gUHuvhsYSSRpPenum4Pr\nLyGS0HH3+e4+K7juT8ALwCnp+E53ufvOIJ69uPuLwHJgNlCGyA8SIrmeEpvklD+IvOoirXs/xwOr\notZXBW2p59gnMW4DCmY0EHffCpwHXAmsNbMJZnZyOuJJials1PpvGYjnj6jXdqQknnVR27enHG9m\nVc3sAzP7zcz+JlKRHrCbM8rv7r7jEPu8CNQCnnL3nYfYV3IDDR5RYpMc8yWwE+iWxj6/EulGS1Eh\naMuMrUCBqPXS0Rvd/SN3b0ekcllG5B/8Q8WTEtOaA+wba88RiauKuxcGbidyRyUtab6TyswKEhm8\n8zJwd9DVKrmduiKV2CRnuPsmIveVngkGTRQws0Qz62hmDwe7vQXcYWYlg0EYdwKvH+ych7AIaGNm\nFYKBK7elbDCzUmbWNbjXtpNIl2byAc4xEagaPKKQYGbnATWADzIZU0YUAv4GtgTV5FX7bF8HVM7g\nOZ8E5rn75UTuHT6f5ShFjgBKbJJj3P1/wI1EBoT8DvwCXAO8F+xyPzAP+Ar4GlgQtGXmWlOAUcG5\n5rN3MooL4viVyFt6T2H/xIG7/wF0Bm4i0pU6EOjs7hsyE1MG3UxkYMpmItXkqH223w0MD0ZN9jrU\nycysK3Am/3zPG4EGKaNBJRdTV6TeoC0iEhZxxSp63lPvOPSO6bTjvSty5Ru0VbGJiEioaDYCEZEw\nycVdiLGiik1EREJFFZuISIiYKjYltvj8RTyh8HE5HYbkYjXLFcnpECSXW7hg/gZ3L5nV8xhKbKDE\nRkLh4yh74ZM5HYbkYjMGd8zpECSXOyZv3L4z2kgWHPWJTUQkNIxDz0dzFFBiExEJDVNXJBoVKSIi\nIaOKTUQkRFSxKbGJiISKEpu6IkVEJGRUsYmIhIgqNiU2EZHw0HB/QF2RIiISMqrYRERCwvQcG6DE\nJiISKkps6ooUEZGQUcUmIhIiqthUsYmISMioYhMRCRFVbEpsIiLhoefYAHVFiohIyKhiExEJEXVF\nKrGJiISGHtCOUFekiIiEiio2EZEQUcWmxCYiEi7Ka+qKFBGRcFHFJiISFqauSFBiExEJFSU2dUWK\niEjIqGITEQkRVWxKbCIioaEHtCPUFSkiIqGixCYiEiYWw+VQlzJ7xczWm9k3UW3FzWyKmf0Q/Fos\natttZrbczL4zsw5R7Q3N7Otg2xALyk4zy2tmo4L22WZWMT2/BUpsIiKSWcOAM/dpuxWY5u5VgGnB\nOmZWA+gN1AyOedbM4oNjngOuAKoES8o5+wF/uvtJwOPAQ+kJSolNRCQsgufYYrUcirtPBzbu09wV\nGB58Hg50i2of6e473X0lsBxoYmZlgMLuPsvdHXhtn2NSzjUGaGvpCEyDR0REQiTGg0dKmNm8qPWh\n7j70EMeUcve1weffgFLB57LArKj9Vgdtu4PP+7anHPMLgLvvMbNNwLHAhrQCUGITEZGD2eDujTJ7\nsLu7mXksA0oPdUWKiIRIdnZFHsS6oHuR4Nf1QfsaoHzUfuWCtjXB533b9zrGzBKAIsAfhwpAiU1E\nJEyycVTkQYwH+gSf+wDjotp7ByMdKxEZJDIn6Lb828yaBffPLtnnmJRz9QA+Du7DpUldkSIikilm\n9hZwKpF7cauBu4DBwGgz6wesAnoBuPu3ZjYaWALsAa5296TgVP8mMsIyPzApWABeBkaY2XIig1R6\npycuJTYRkRDJzplH3P38g2xqe5D9BwGDDtA+D6h1gPYdQM+MxqXEJiISElm8NxYauscmIiKhosQm\nIp
l2Zf/LOKFcKRrVr71X+9h33qZRvVoUzBfPgvnz9tr2yMMPUrt6FerVOpkpkz/KznCPCkfAqMgc\np8QmIpl20cV9ee/9Sfu116hRizdHvUOr1m32al+6dAljRo9i3qJveO/9Sdxw3dUkJSXtd7xknhKb\nEpuIZEGr1m0oXqz4fu0nV69O1WrV9mv/4P1x9Oh1Hnnz5qVipUpUPvEk5s2dkx2hylFEiU1Ess3a\nNWsoV+6fZ3TLlivLr7+uSeMIybCcf44txymxiYhIqGi4v4hkmzJly7J69S+p62tWr+H448umcYRk\nVG6+NxYrqthEJNt06tyFMaNHsXPnTn5auZIVy3+gUeMmOR1WeGTza2uOVEpsIpJpfS6+gNNOacEP\n339HlcrlGf7qywCMH/cuVSqXZ/asLzm3W2e6dIq8N7JGjZp079GThnVr0u3sjjz25NPEx8endQmR\nDLN0zCcZanlLVfGyFz6Z02FILvbN4I45HYLkcsfkjZufldfDpMhXuoqXu2hILEICYMX/zopJXNlN\n99hEREIjd3chxoq6IkVEJFRUsYmIhIgKNiW2o07f1hU5r1l5zGDUrF94dfpPqdsuaXUCF7c8gSR3\nPlmynoc++A6Aq9qeSM+m5UhOdu55dwmff7cBgJs6VuWcRmUpUiCR2rdNzomvI+lQvWolChYslDpI\n44khz9CseYuD7n9c8UKs37g5S9fsf/mlzJj+GYWLFCEuLo7Hn3yaps2aZ+gcE94fz9JlS7h5wK28\nP+49TqpalerVawBw3z130rJVG05ve0aW4gwjdUUqsR1VqpYuyHnNynPOEzPZneQM69+Yj5esZ9WG\nbTQ7qTjtapWi06Mz2JWUzLEF8wBwUqmCdK5fhjMf+pzjiuRlxJVNaPvgZyQ7TFuyntdmrOLj20/J\n4W8mhzJp8seUKFEiW685aPDDnHNuD6ZOmcy1V1/JnPmLM3R8p7O70OnsLgC8//44Op7VKTWx/feu\ne2Mer4SH7rEdRU4sVZDFP//Fjt3JJCU7s1dspEPt0gBc2OIEnp+2gl1JyQD8sWUXAO1qleKDhWvZ\nlZTM6o3bWbVhG3UrFAVg0aq/+H3zzpz5MpIlW7Zs4awOZ9CiaUMaN6jDB+PH7bfP2rVrad/2FJo1\nrk+j+rWZOeNzAKZOmcxpbVrQomlDLjq/F1u2bEnzWq1at+HHFcsBWLx4Eae2bk6ThnXp3fNc/vzz\nTwCefXoIDevWpEnDuvS5KPLuyhGvDePG669h1pdfMPGD8fzfrQNp1rg+P65YQf/LL+XdsWOY/NGH\nXHR+r9RrTf/sU7p3OztTcYaCRboiY7XkVkpsR5Hv126mcaXiFC2QSL7EOE6tXpIyRfMBUKnkMTSu\nXJyx17fgraubUqd8EQBKFcnLr39tTz3Hb5t2ULpIvhyJXzKvY/vTada4Pqe0agZAvnz5GPn2WL6Y\nPZ9Jkz/mtltuZt9Hf0aPfJMz2rVn1tyFzJ63iDp167FhwwYeHjyIDyZN4YvZ86nfsCFPPflYmtee\nOOF9ataKvNbmisv6cN+gwcyZv5iatWrxwP33APC/Rx/iizkLmDN/MU8+/dxexzdr3oKzOndh0OCH\nmTV3IZVPPDF12+ltz2Du3Nls3boVgHfeHkWPXudlKs4wMCAuzmK25FbqijyKrFi/lRc+WcHwfzVh\n+64klq75m+TgH7P4OKNIgUTOffIL6lQowlOX1OeUQZ/mbMASM/t2Rbo7d//3dmbM+Jy4uDh+/XUN\n69ato3Tp0qn7NGzUmKv692P37t107tKNunXrMWP6ZyxbuoS2p7YCYPeuXTRp1uyA1/y/Wwfy0IOD\nKFGiJM++8BKbNm1i06a/aN0m0nV94UV9uOiCSLVVq1YdLutzEZ27dOXsLt3S/b0SEhJo164DEye8\nzznn9uDDDydy/4MPZyhOCR8ltqPM6NmrGT17NQA3n1WV3/7aAUQq
sY++/g2Ar37eRLI7xY/Jw7pN\nOzm+aP7U40sXycdvm3Zkf+ASUyPfeoMNGzYwc9Y8EhMTqV61Ejt37P3n2qp1GyZP+4wPJ03gX5df\nyrXX30CxosU4rW07ho9485DXSLnHlmLTpk0H3XfsuA+Y8fl0Jk54n0cGP8CcBV+l+7v06NWbF557\nhmLFitOgQSMKFSqEu6c7zrDJzV2IsaKuyKNMyqCQ44vmo0Pt0oxb8CsAU75eR7OTjgUi3ZKJ8XFs\n3LqLqd+so3P9MuSJj6Nc8fxULHkMi3/+K8fil9j4e9MmSpYsSWJiIp99+gk/r1q13z4/r1rFcaVK\ncWm/K+h7aT8WLVxA46bNmPXlTFYsj9wz27p1Kz98/326rlmkSBGKFi2Weq/urTdH0Kp1G5KTk1n9\nyy+ccupp3P/AQ2z6e9N+98MKFSzI5s0HHqnZus0pLFq0gGGvvESPXucBZCnO3E5zRapiO+o827cB\nRQsksifZuWvst2zesQeAt+f8wkO96zBpQGt2JyUz4K3IT8w/rNvChEVr+eiW1iQlO3e98y3Jwa2Y\nWzpXo0uD48mfGM/MO09j9OzVPPnRDzn11SQDzjv/Qnqe24XGDerQoGEjqlU7eb99pk//lCcee5TE\nxEQKFizIiy8Pp2TJkrzw4qv0veQCdu6MDBy66+77qFK1arquO/TlYVx/zVVs27aNSpUq8/yLr5CU\nlES/Sy9m06ZNuDtXXX0tRYsW3eu4Hr16c81V/Xnumad4462399oWHx9Px46deH3EcIa+PAwgy3FK\n7qa5IjVXpGSR5oqUrIrVXJH5j6/qJ/V7JhYhAfDN/e1z5VyR6ooUEZFQUVekiEhIGJp5BJTYjmhj\nr29BnoQ4ihZIJG9iHOs2Re4V/OuV+az5c/shjk6/E0oU4JPbT+W/Y77hjS9+BuD+HrWYu3Ij4+b/\nGrPrFCmQSKe6ZXjzy8g1yhTNx21nn8x1IxbF7BpyaKe0asbOnTv588+N7Ni+nTLBG6xHvf0uJ1Ss\nGPPr3XPXHRx7bAmuue4/XNb3Ys45pztnd917SP9lfS9m1hczKVwk8vxkoUKFmPLx9JjHEn65e9BH\nrCixHcHOffILALo3Lkvt8kW4e+ySA+4XZ6QO6Mis3//eyWVtKjFy1i8kZfVkB1G0QCIXtKiQmtjW\n/rVDSS0HfDZjFhCZ2WPh/Hk89uTTORxRxEOPPLZfwou2Z88eEhISDrqe3uMk/HSPLReKjzMWDWrH\nf7tVZ+LNrahboSgz7zyNQvki//PWO6EoI65sAkCBPPE83LsO7/6nBe/f2JLTaxx3wHP+vnknc1du\n5JyGZffbVrFEAYb1b8y4G1oy8upmVCp5TGr72OtbMGlAa27qWJVFg9oBUDBvAq9f1YTxN7Zk4s2t\nUq85sFM1Kh93DB/c1IqBnapxQokCfHBT5AHacTe0TD0vwOhrmlH9+ELpjl+y7pWXhnLbLTenrr/4\nwnPcfusAVixfTqN6tehz0fk0qFODiy84j+3bIz0G8+fNpcMZp9KyWSO6nX0W69ati2lM99x1B5df\n1oe2p7ai/+WXMuyVlzivxzl0bH86XTqdSXJyMrcMuJFG9WvTuEEd3h07BoCPp03lzHan0b3b2TRp\nUCemMR3pNKWWEluuVTh/InNWbOSsR2ewcNXBnyu7tv1JTF/2O+c88QUXPjeb/+tSnTwJB/5jf37a\nCq44rdJ+f6EH9arNne98S9fHZ/LIhO+4+9zIRLR3nVuTFz/9kY6PfM76qDkjd+xO4spXFtDlsZlc\n/Pwc7uhaHYCHJ3zHj+u30vl/M3h4wnd7XeODRb/SqV4ZIPIQeJECiSz9dXOG4pes6dGrN++Pe489\neyKPgIx4bRiX9LkMgKVLl3D1tdez4Ksl5M2Xj5dffIGdO3cy4Kb/8MbIMcycNY/eF1zIfXf/N9PX\nv2XAjTRrXJ9mjetz+WV9Utu/
/24ZEz6cyivDRgCwePFC3hz1DhM/msrYd97mu2XLmD1vEe9PnMwt\nA25k/fr1ACyYP48nhjzDgq8O3NMRVnqO7TB2RZqZA4+5+03B+s1AQXe/+3Bd8wAxDAM+cPcx2XXN\n7LJzTxIffX3on45bVyvJqdWP48q2kfn18ibGUbZYflb+vnW/fX/asI2lv26mc5BgAArlS6D+CUV5\ntm+D1Lb4YA65ehWKctmLcwEYv+BXbuoYeUbIDAZ2rkajSsVI9si9tGLHJKYZ54RFa3mpXyOenrKc\nTvXKMGnxbxmOX7KmcOHCtGzdhskfTqJi5crEx8dzcvXqrFi+nIoVK9GkaWRKqt7nX8irL79I61NO\nZemSb+ncMVKpJyUlUbZsuUxf/2BdkZ3P7kK+fP/MT9r2jHYUK1YMgC9nzqDneb2Jj4+ndOnSNG/R\nigXz55EnTx6aNG1O+QoVMh2P5F6Hs+N5J3CumT3o7hsyerCZJbj7nsMQVyjs3J2813pSsqdOWpo3\nqqIxiww2+fmPbek67zNTlvPExfVSq0Az2LhlF53/NyPdsZ3TqCyF8iVw9mMzSUp2Zt55GnkT4tM8\n5tc/d7B1Z1Lqa3JSHhDPaPySNX0v7cdTTz5OhRNO4OJL+qa27/vTu5nh7tSqXeewD/IoUOCYNNcP\n5phj0rdfqOTyLsRYOZx9OnuAocAN+24ws4pm9rGZfWVm08ysQtA+zMyeN7PZwMNmdreZDTezz81s\nlZmda2YPm9nXZvahmSUGx91pZnPN7BszG2q5uYbOpNUbt1O7XGRE2Zl1/pnIdvqy3+nT+oTU9Rpl\nC6d5nh/WbeHnDds49eSSAPy9fQ+/b95J+9qlgMj/NCcfXwiAxT//lfram7P3qvIS+WPLLpKSnVZV\nS1AmmGty6449HJP34AluwqK1XNX2RPIkxLF83ZZMxS9Z07xFS378cQXvjh1D957npbb/9NNK5s+L\nVOejR71F8xYtqV69Br+uWcO8uXMA2LVrF0uWfJut8bZo1Zoxo0eRnJzMunXrmPXlTBo0zHXPE8dM\nynD/o70r8nDfrHgGuNDMiuzT/hQw3N3rAG8AQ6K2lQNauPuNwfqJwOlAF+B14BN3rw1sBzoF+zzt\n7o3dvRaQH+h8WL7NEezJj37g3u41ee8/Ldid9E81N2TycvLniWfSgNZ8OLA113eocshzPT1lOccX\n+2fi4+teW8gFzSsw4eZWfDSwTeoAjnveXcK/Tq/MxJtbUe7YAmzeHimw35u/hgYVizFpQGs61y/D\nyvWRbsMNW3bxzeq/mTSgNQM7VdvvuhMXr6VLg+OZsGhtluKXrDnn3O60bNWGIkX++d/25JOrM+TJ\nx2lQpwbbt23jssv7kzdvXl4f+Ta3DryJJg3r0qJJA+bOmZ3p60bfY2vWuD5JSUnpiLUHVatVo0nD\nunTu2I7BD/+P447TAKOj3WGbUsvMtrh7QTO7F9hNJBEVdPe7zWwDUMbddwdV11p3LxHcE/vE3YcH\n57gb2O3ug8wsLjhHPnf34Lwb3f0JM+sODAQKAMWBp9x98MHusZlZf6A/QHyhkg0rXD7ssPwehF3+\nPPFs3xX5x6drw+PpULs0/x62IIejyn5hm1Kra+eO3Dzw1tTXy6xYvpwLz+/JrLkLcziy8IrVlFrH\nlK3m1a96PhYhATD/v6fnyim1suPhjieABcCr6dx/31EBOwHcPdnMdvs/mTgZSDCzfMCzQCN3/yVI\nhmm+CdPdhxLpJiVvqSpH92SZWVCnfBH+260GcQabtu9m4Fvpf9WIHHn++OMPTmvdnPoNG6UmNcl9\ncnMXYqwc9sTm7hvNbDTQD3glaP4C6A2MAC4EPs/CJVKS2AYzKwj0AEI3CvJINHvFxgwNKpEj27HH\nHstXS/Z/tcuJJ52kak1ylex6HP9/wDVR69cCr5rZAOB34NLMntjd/zKzF4FvgN+AuVkJVEQkN1
PB\ndhgTm7sXjPq8jsj9r5T1VUQGhOx7TN991u9O45x3R32+A7jjUOcTEQk1U1ckaOYREREJGc0MKiIS\nEpHn2HI6ipynik1EREJFFZuISGjk7hlDYkWJTUQkRJTX1BUpIiIho4pNRCRE1BWpxCYiEh56bQ2g\nrkgREQkZVWwiIiGR8j62o50Sm4hIiCixqStSRERCRhWbiEiIqGBTYhMRCRV1RaorUkREQkaJTUQk\nLILn2GK1pOuSZjeY2bdm9o2ZvWVm+cysuJlNMbMfgl+LRe1/m5ktN7PvzKxDVHtDM/s62DbEslB6\nKrGJiEimmFlZ4DqgkbvXAuKB3sCtwDR3rwJMC9YxsxrB9prAmcCzZhYfnO454AqgSrCcmdm4lNhE\nRELCgtn9Y7WkUwKQ38wSgALAr0BXYHiwfTjQLfjcFRjp7jvdfSWwHGhiZmWAwu4+y90deC3qmAxT\nYhMRCZEYd0WWMLN5UUv/6Gu5+xrgUeBnYC2wyd0nA6XcfW2w229AqeBzWeCXqFOsDtrKBp/3bc8U\njYoUEZGD2eDujQ62Mbh31hWoBPwFvG1mF0Xv4+5uZn54w9ybEpuISIjEZe9w/zOAle7+O4CZjQVa\nAOvMrIy7rw26GdcH+68BykcdXy5oWxN83rc9U9QVKSISItk8KvJnoJmZFQhGMbYFlgLjgT7BPn2A\nccHn8UBvM8trZpWIDBKZE3Rb/m1mzYLzXBJ1TIapYhMRkUxx99lmNgZYAOwBFgJDgYLAaDPrB6wC\negX7f2tmo4Elwf5Xu3tScLp/A8OA/MCkYMkUJTYRkZCIVFrZO/OIu98F3LVP804i1duB9h8EDDpA\n+zygVixiUmITEQmROM2opXtsIiISLqrYRERCRJMgK7GJiISK8pq6IkVEJGRUsYmIhIQRmS/yaKfE\nJiISIhoVqa5IEREJGVVsIiJhkbHXzYSWKjYREQkVVWwiIiGigk2JTUQkNIxsf23NEUldkSIiEiqq\n2EREQkQFmxKbiEioaFSkuiJFRCRkVLGJiIRE5EWjOR1FzlNiExEJEY2KVFekiIiEjCo2EZEQUb2W\nRmIzs8JpHejuf8c+HBERyQqNiky7YvsWcPb+ASBl3YEKhzEuERGRTDloYnP38tkZiIiIZE1kSq2c\njiLnpWvwiJn1NrPbg8/lzKzh4Q1LREQyLHhtTayW3OqQic3MngZOAy4OmrYBzx/OoERERDIrPaMi\nW7h7AzNbCODuG80sz2GOS0REMiEXF1oxk56uyN1mFkdkwAhmdiyQfFijEhERyaT0VGzPAO8AJc3s\nHqAXcM+wwjPxAAAgAElEQVRhjUpERDIlN98bi5VDJjZ3f83M5gNnBE093f2bwxuWiIhklEZFRqR3\n5pF4YDeR7khNwyUiIkes9IyK/D/gLeB4oBzwppnddrgDExGRjNNw//RVbJcA9d19G4CZDQIWAg8e\nzsBERCTjcm86ip30dCuuZe8EmBC0iYiIHHHSmgT5cSL31DYC35rZR8F6e2Bu9oQnIiLpZab3sUHa\nXZEpIx+/BSZEtc86fOGIiEhWKK+lPQnyy9kZiIiISCwccvCImZ0IDAJqAPlS2t296mGMS0REMiE3\nj2aMlfQMHhkGvEpksE1HYDQw6jDGJCIimWQWuyW3Sk9iK+DuHwG4+wp3v4NIghMRETnipOc5tp3B\nJMgrzOxKYA1Q6PCGJSIiGWWYRkWSvsR2A3AMcB2Re21FgMsOZ1AiIiKZlZ5JkGcHHzfzz8tGRUTk\nSJPL743FSloPaL9L8A62A3H3cw9LRCIikmkaFZl2xfZ0tkWRg2qVK8LMh8/K6TAkFyvW+JqcDkFE\noqT1gPa07AxERESyTu8VS//72ERE5AhnqCsSlNxFRCRk0l2xmVled995OIMREZGsiVPBlq43aDcx\ns6+BH4L1umb21GGPTEREMizOYrfkVunpihwCdAb+AHD3xc
BphzMoERGRzEpPV2Scu6/a54Zk0mGK\nR0REMikyeXEuLrViJD2J7RczawK4mcUD1wLfH96wREQkM3JzF2KspKcr8irgRqACsA5oFrSJiIgc\ncQ6Z2Nx9vbv3dvcSwdLb3TdkR3AiIpIx2f0+NjMramZjzGyZmS01s+ZmVtzMppjZD8GvxaL2v83M\nlpvZd2bWIaq9oZl9HWwbYlnoU03PG7Rf5ABzRrp7/8xeVEREYs8gJ15b8yTwobv3MLM8QAHgdmCa\nuw82s1uBW4FbzKwG0BuoCRwPTDWzqu6eBDwHXAHMBiYCZwKTMhNQeroipwLTgmUmcByg59lERI5y\nZlYEaAO8DODuu9z9L6ArMDzYbTjQLfjcFRjp7jvdfSWwHGhiZmWAwu4+y90deC3qmAxLz2trRu3z\nRUYAMzJ7QREROXxiPJ1UCTObF7U+1N2HRq1XAn4HXjWzusB84HqglLuvDfb5DSgVfC4LzIo6fnXQ\ntjv4vG97pmRmrshK/BOkiIiE1wZ3b5TG9gSgAXCtu882syeJdDumcnc3s4O+Au1wSM89tj/55x5b\nHLCRfQIXEZEjQzbfYlsNrI56IfUYIvlhnZmVcfe1QTfj+mD7GqB81PHlgrY1wed92zMlzao1GJVS\nFygZLMXcvbK7j87sBUVE5PAwM+JiuByKu/9G5FnnakFTW2AJMB7oE7T1AcYFn8cDvc0sr5lVAqoA\nc4Juy7/NrFmQdy6JOibD0qzYghJyorvXyuwFREQk1K4F3ghGRP4IXEqkaBptZv2AVUAvAHf/1sxG\nE0l+e4CrgxGRAP8GhgH5iYyGzNSISEjfPbZFZlbf3Rdm9iIiIpI9snu0v7svAg50H67tQfYfBAw6\nQPs8ICZF1EETm5kluPseoD4w18xWAFuJPCrh7t4gFgGIiEjsaEqttCu2OURGu3TJplhERESyLK3E\nZgDuviKbYhERkSzIoZlHjjhpJbaSZnbjwTa6+2OHIR4REckC5bW0E1s8UJCgchMREckN0kpsa939\n3myLREREssY0eATScY9NRERyD9M/3WnOPHLAZxBERESOZAet2Nx9Y3YGIiIiWRMZFZnTUeS8zMzu\nLyIiRygltpi/ukdERCRnqWITEQkR04NsqthERCRcVLGJiISEBo9EKLGJiISFaUotUFekiIiEjCo2\nEZEQ0ez+SmwiIqGhe2wR6ooUEZFQUcUmIhIi6olUYhMRCREjTrP7qytSRETCRRWbiEhIGOqKBFVs\ncgB//fUX55/Xg7q1TqZe7erM+vLLnA5JRNIjeIN2rJbcShWb7OfmG66nffszeWvUGHbt2sW2bdty\nOiQRkXRTYpO9bNq0iRkzpvPiK8MAyJMnD3ny5MnZoEQk3fSAtroiZR8/rVxJiRIl6d/vUpo1qs9V\n/S9n69atOR2WiEi6KbHJXvbs2cOihQu44l9XMWveQgoccwyPPjw4p8MSkXRIGTwSqyW3UmKTvZQt\nV46y5crRpGlTAM7p3oNFCxfkcFQikl5xZjFbcislNtlL6dKlKVeuPN9/9x0An348jZOr18jhqERE\n0k+DR2Q/jz3xFJdeciG7du2iYuXKDH3p1ZwOSUTSKRcXWjGjxCb7qVuvHjNnz8vpMEQkgwx1w4F+\nD0REJGRUsYmIhIWBqS9SiU1EJEyU1pTYjkjVTqpIoYKFiI+PB+CJp56leYsWB92/RNGCbPhrS5au\necVlfZk2bQpLv/+RvHnzsmHDBlo2a8R3y3/K0nn3NX7ce1SpUpXqNSIjLe+9+05atW7D6W3PiOl1\nJHaev+tCOrapxe8bN9Oo5wOp7Q/8pxtntanFrt1JrFy9gf53vc6mLdtJSIjjuTsvpN7J5UmIj+ON\nCXN49JXJ5M+XyBsP96NyuRIkJTsTp3/Nf4eMB+Dhm86lTeOqABTIl4eSxQtSps3AHPm+kvspsR2h\nPpz6CSVKlMjWa8bHxz
P81Vfof+VVh+0a7497j46dOqcmtjvvvvewXUtiY8T7s3h+1Ge8dN8le7VP\nm7WM/z41nqSkZO6/risDLmvPHUPG0f2MBuTNk0DjXg+QP18iC9+5g9GT5vH7n5t54rVpTJ/3A4kJ\n8Ux64Vrat6zB5JlLGPi/sannvar3KdStVi67v2YoGJpSCzR4JNfYsmULHdu3pXnjBjSqV5v3x4/b\nb5+1a9dyxmltaNqwHg3r1WLGjM8BmDplMqe0ak7zxg24oHdPtmw5cHV3zbX/4akhj7Nnz579tj32\nv0do2awxjevX4b577kptf3DQfdSpWY3TT2nFJRedz+OPPQrAKy+9SMtmjWnSoC69e3Vn27ZtfPnF\nF0z4YDy33zqApg3r8eOKFVxxWV/GvjOGyR99yAW9e6aed/pnn3Ju184Zil8Oj5kLVrBx0/4TYU+b\ntYykpGQA5ny9krKligLgOAXy5SE+Po78efOwa3cSm7fuYPuO3Uyf9wMAu/cksWjZL5Q9ruh+5+11\nZkNGfzj/MH6jcLMYLrmVEtsR6swzTqNpw3q0bhGZASRfvnyMGvMuX85dwIdTP+HWgTfh7nsdM2rk\nm7Rr34HZ8xcxZ/5i6tatx4YNGxj8wP1M/GgqX85dQIOGjRjyxGMHvGb5ChVo0aIVb74+Yq/2qVMm\ns+KHH5jx5Rxmz1/EwgXzmfH5dObNnct7Y99hzvzFjPtgEgvm//OIQNdzzmXmrLnMWbCYk0+uzrBX\nXqZ5ixZ06tyFBwY/wuz5i6h84omp+5/e9gzmzpmdOi/lmNGj6Nmrd4bil5xzSdfmfDRzCQBjpy5k\n245drJwyiO8n3csTr03jz7/3ToxFCubnrDa1+WTOd3u1VyhTjBOOP5ZP5+7dLpIR6oo8Qu3bFenu\n3HnH7cz8fDpxcXH8umYN69ato3Tp0qn7NGrUmH9dcRm7d+/m7C7dqFuvHp9P/4xlS5dwepuWAOza\nvYumTZsf9LoDbrmNnt27cuZZnVLbpk6ZzNSpk2nWqD4AW7ZuYfkPP7B582Y6d+lKvnz5yJcvH2d1\nOjv1mCXffsPdd97Bpr/+YsvWLbRr1yHN75uQkED79mcy4YP3Obd7DyZNmsCgwQ9nOH7JfgP7dSAp\nKZmRE+cC0LhmRZKSkqnc/v8oVqgAU1+5gY9nL+OnNX8AEB8fx/DBfXn2rU9T21L07NCQ96YtIjnZ\n97uOpI96IpXYco2Rb77Bhg2/88Wc+SQmJlLtpIrs3LFjr31atW7DlI+n8+HECfTv15fr/nMjRYsV\n4/Qz2vHa62+l6zonValCnbr1eOft0alt7s6Agbdxef9/7bXvU08+cdDzXNGvL6PHvEedunUZMXwY\n0z/79JDX7nleb5579mmKFy9Og4aNKFSoEO6eofgle110dlPOalOLjv8aktrWq2MjJn+xhD17kvn9\nzy18uehHGtaokJrEnrnjfFb8/DtPv/npfufr0aEhNwwevV+7pJdpuD/qisw1Nm3aRMmSx5GYmMhn\nn37Cz6tW7bfPqlWrKFWqFJddfgV9L7uchQsX0KRpM778YiYrli8HYOvWrfzw/fdpXuuWW/+PJx5/\nNHW9XfsODB/2Suq9rTVr1rB+/Xqat2jJxA/eZ8eOHWzZsoVJEz9IPWbL5s2ULlOG3bt3M/KtN1Lb\nCxYqxJbNmw943dZtTmHRwgW88vKL9OzVGyBT8Uv2aNeiOjf2PYMe/3mB7Tt2p7av/m0jpzauBkRG\nODapU5HvfloHwF3/7kyRQvm5+ZF39jtf1YqlKFa4ALMWr8yeLyChpYotl+h9wYV073Y2jerVpkHD\nRlQ7+eT99vn8s095/LFHSExI5JiCBXn51dcoWbIkL748jEsuOp9dO3cCcNe991OlatWDXqtGzZrU\nq98gdVb/M9q1Z9nSpZzaKtIFeEzBgrw6/HUaNW5Mp7O70LhBHY47rhQ1a9WmSOEiANx5
9320admU\nEiVK0rhJ09Rk1rNXb66+6gqefXoIb44as9d14+Pj6XhWZ15/bRgvvTIcIFPxS2wNf7AvrRtWoUTR\ngiz/8D7ue34iw9/7ksdv6UXePAl88Nw1AMz5+ieuGzSS50dNZ+g9FzF/zP9hBiPGzeKbH36l7HFF\nufWKM1n24298+dYtADw/6jOGvfslEOmGfPsjDRrJCk2pFWH7DkA42jRs2Mg1L2LmbdmyhYIFC7Jt\n2zbandaGp58bSv0GDXI6rGxVrPE1OR2C5HI7Fj0z390bZfU8J9ao6w+8MTEWIQHQu0G5mMSV3VSx\nSZZcfVV/li1Zwo6dO7jo4j5HXVITOdLoHpsSm2TR8BFv5nQIIhJFaU2JLVdq3aIpu3buZOOfG9mx\nfTvHH18WgNHvvMcJFSvG/Hp333kHxx5bgmuv/89+7a8Nf5WSJUqmtk399HMKFSoU8xgk66a/djN5\n8iRQvHAB8uVL5Nf1mwDodcNQfl67MWbXqVy+BPNG3873q9aTJzGez+b+kKmRjuOfuZoLBrxEYkI8\n3ds34KUxMwAoV6ooD95wDhffqvcEyoEpseVCn38xG4ARw4cxf/48nhjydI7FcsONA/ZLeNH27NlD\nQkLCQdcPxt1xd+LidCs8VtpcEhnpetHZTWlYowI3PPT2AfeLi7MsP0f2/ar1NOs9mISEOCa/eD2d\nTqnNhM++ztA5ulz9DBBJlJf3aJWa2Fav+0tJ7WA0uz+gATSh8vKLQ7l14M2p60Off47bbhnAiuXL\naVC3Jhdf2Jt6tatz4fm92L59OwDz5s6l3emn0KJJQ7p27si6deuyHMerL79Ez+7d6HDGaZx9Vgc+\nnjaV9m1P5dyunWlUvzYA/3v0YRrWq0XDerV49umnAFixfDn169Sg78UX0qBuTdauXZvlWOTQ4uPj\nWDv9YR65uTtzRt1G41oVWf7hfRQpmB+AJrUrMuH5yACZY/LnYeg9F/H5iJv58q1bOKtNrTTPvWdP\nMrO/+okTy5fEzHjopnOZ9/btzB19O+ecUQ+A40sWYdorNzBr5K3Me/t2mtWtBJAaw/3XdaXqCccx\na+St3HddFyqXL8GskbcCMOONgVQ54bjU60175QbqVC2b4TjDImVUZKyW3EoVW4j0PK83zRrX5/4H\nBpOQkMBrw19NHTa/dMkSnnvhZZo2a0a/vpfw0tAX6H/lVdx84/WMeXc8JUqU4K033+Deu/7LM88P\nTfc1H3/sEV5/bRgAx5YowcSPpgKweNFCZs9bRLFixfh42lQWzJ/Hgq+WUKFCBebMns2oN99gxpdz\n2bNnD61bNKHNKaeSP39+vlu2jJdeeY2GjXLdQKxcrWihAsxYsJwBj+7/fFm02/t3ZMoXS+l/1+sU\nLZSf6SMGMG3WMnbu2n9+UYg8x3ZK46rc8eQ4urerT7VKpWhy3oOULFaQGa8PZMb85ZzfqTETp3/N\n/4ZNJS7OyJ83ca9z3DFkHJXLl6RZ78FApIJL8c5H8+nevgGDX/yQsscVpViRAnz1/RoGXd81Q3FK\nuGR7YjOzbsC7QHV3X2ZmFYEW7v5msL0ecLy7Z2rMqpn9BDRy9w2xiTj3KFy4MK1ateGjDydRqVJl\n4uPjObl6dVYsX07FSpVo2qwZAOdfeBEvvzSUNqecytIl39KpQ+SVMUlJSZQtl7FZ1Q/WFXnGGe0p\nVqxY6nrTZs2pUKECAF98MYNu53Ynf/5IRXB2l27MnPE5Z7RrT+UTT1RSywE7d+1m3MeLD7lf2+bV\nad+yJjdd2g6AfHkSKF+6OMt/Xr/XfikVVnKyM/6TxXw8exmP3dKT0R/OJznZWffHZr5YtIIGNSsw\n79ufefqO3uTNk8j7n37F19+vSXfc70xZwJgnrmTwix/So0MDxk5ZmKE4w0hdkTlTsZ0PzAh+vQuo\nCFwApAyvqwc0AmL3MMZRpO9llzPkycc44YSKXNLn
0tT2ff+ymxnuTq3adZj26ecxj6PAMcekuX4w\nxxRI334SW9t37t5rfU9SMnFxkb8zefP8U0GZQa8bh7Jyddo/N6bcY0uPz+Z+T4fLn+TM1rV46b6L\neXzYVEZOSt+zpT+v/ZOt23dycuXS9GjfgCvuej1DcYaR0lo2d6OaWUGgFdAP6B00DwZam9kiM7sF\nuBc4L1g/z8yamNmXZrbQzL4ws2rBueLN7FEz+8bMvjKza/e5Vn4zm2RmV2TjV8xxLVq2ZOWKFYx9\n52169Dovtf2nlSuZNzcySe2ot96kRYtWVK9Rg19/XcPcOXMA2LVrF0u+/fawx9iyZWvGv/cu27dv\nZ8uWLXzw/jhatmp92K8r6bfq143Urx6psFPuhQFM/WIp/+59Sup6Rt6bNnPBcnp2aIiZcVzxQjSv\nW5kF3/5MhTLF+O2Pv3ll7ExGjJtF3ZPL73Xclq07KVQg70HPO+ajBQy4tD158iSw7Mffshyn5H7Z\nXbF1BT509+/N7A8zawjcCtzs7p0BzGwdka7Ea4L1wkBrd99jZmcADwDdgf5Eqr16wbbiUdcpCIwE\nXnP317Lryx0pzuneg++WLaNIkSKpbSdXr86QJx/jq8WLqFmrNv2u6E/evHl5c+QYbrrhOjb//TdJ\nyUlc/5+bqFGzZrqvFX2PDWDMe+8f8pjGTZrQs/f5tGreGIAr+l9Frdq1U+eDlJx3//MTefbO89m0\neTszFvzz5zLohUk8MqA7c0ffTlycseKX3+l1Q/ruyY6duogmdSoxd/RtuMMtj43l9z+3cEnXZlx3\n0ens3pPElm076XfH8L2OW79xMwuX/sLc0bfz4YxvePXdL/Y570Ieuulc7n1uQkzizO1yoifSzOKB\necAad+8c/Hs8isi/0T8Bvdz9z2Df24gUN0nAde7+UdDeEBgG5CfSY3e9Z3JqrGydUsvMPgCedPcp\nZnYdUAH4gL0TW1/2TmzlgSFAFcCBRHc/2czeAZ539yn7XOMnYBPwsLu/wQGYWX8iiZHyFSo0/H7F\n/hMK52ZdOp3JgFtuo3WbyE+sK5Yv54LzejB7/qIcjiycNKWWZFWsptSqUrOuPzZycixCAqBLndLp\nisvMbiRyC6lwkNgeBja6+2AzuxUo5u63mFkN4C2gCXA8MBWo6u5JZjYHuA6YTSSxDXH3SZmJO9u6\nIoMMfjrwUpB8BgC9OHSX8H3AJ+5eCzgbyJeOy80EzrSD3EV196Hu3sjdG0U/XJzb/fHHH9SqXoWi\nxYqlJjURkcPJzMoBnYCXopq7Aiml93CgW1T7SHff6e4rgeVAEzMrQyQpzgqqtNeijsmw7LzH1gMY\n4e4nuHtFdy8PrASSgeipKjbvs14ESBkm1TeqfQrwLzNLgNTEmeJO4E/gmZh+gyPcscceyzdLf9jv\n3WUnnnSSqjWRo4RZ7BaghJnNi1r6H+CSTwADifxbnqKUu6c8iPobUCr4XBb4JWq/1UFb2eDzvu2Z\nkp2J7Xwiw/yjvUNkEEmSmS02sxuAT4AaKYNHgIeBB81sIXvfE3wJ+Bn4yswWExlZGe16IH9QEouI\nHAUspv8BG1J6t4JlrxuVZtYZWO/uB33fUFCBZetrZLJt8Ii7n3aAtiEH2hdovM969Mu37giO3QPc\nGCzR56wYtXopIiJyuLQEupjZWURuExU2s9eBdWZWxt3XBt2MKQ8QrgGih72WC9rWBJ/3bc+U3Dxr\nioiI7CPGXZFpcvfb3L1cUFD0Bj5294uA8UCfYLc+wLjg83igt5nlNbNKRAYFzgm6Lf82s2bB2IhL\noo7JME2pJSIisTYYGG1m/YBVRAYK4u7fmtloYAmwB7ja3ZOCY/7NP8P9JwVLpiixiYiERGQS5JyZ\ne8TdPwU+DT7/AbQ9yH6DgEEHaJ8HxGS2aiU2EZGwSGcXYtjpHpuIiISKKjYRkRBRxabEJiISKqb5\n/dUVKSIi4aKK
TUQkJAyIU8GmxCYiEibqilRXpIiIhIwqNhGRENGoSCU2EZFQUVekuiJFRCRkVLGJ\niISERkVGqGITEZFQUcUmIhIapntsKLGJiISHZvcH1BUpIiIho4pNRCREVLApsYmIhEZkVKRSm7oi\nRUQkVFSxiYiEiOo1JTYRkXBRZlNXpIiIhIsqNhGRENED2kpsIiKhokGR6ooUEZGQUcUmIhIiKtiU\n2EREwkWZTV2RIiISLqrYRERCwtCoSFDFJiIiIaOKTUQkLPQ+NkCJTUQkVJTX1BUpIiIho4pNRCRM\nVLIpsYmIhIdpVCTqihQRkZBRxSYiEiIaFanEJiISGoZusYG6IkVEJGRUsYmIhIlKNiU2EZEw0ahI\ndUWKiEjIqGITEQkRjYpUYhMRCRXlNXVFiohIyKhiExEJCz3IBqhiExGRkFHFJiISIhrur8QmIhIa\nhkZFgroiRUQkZFSxiYiEiAo2JTYRkXBRZlNXpIiIZI6ZlTezT8xsiZl9a2bXB+3FzWyKmf0Q/Fos\n6pjbzGy5mX1nZh2i2hua2dfBtiFmmb9bqMQmIhIiFsP/0mEPcJO71wCaAVebWQ3gVmCau1cBpgXr\nBNt6AzWBM4FnzSw+ONdzwBVAlWA5M7O/B0psIiIhYha75VDcfa27Lwg+bwaWAmWBrsDwYLfhQLfg\nc1dgpLvvdPeVwHKgiZmVAQq7+yx3d+C1qGMyTIlNRESyzMwqAvWB2UApd18bbPoNKBV8Lgv8EnXY\n6qCtbPB53/ZM0eAREZEQifHYkRJmNi9qfai7D93vmmYFgXeA/7j739G3x9zdzcxjG1balNhERMIk\ntpltg7s3SvNyZolEktob7j42aF5nZmXcfW3Qzbg+aF8DlI86vFzQtib4vG97pqgrUkREMiUYufgy\nsNTdH4vaNB7oE3zuA4yLau9tZnnNrBKRQSJzgm7Lv82sWXDOS6KOyTBVbCIiIRGZ3D9bH2RrCVwM\nfG1mi4K224HBwGgz6wesAnoBuPu3ZjYaWEJkROXV7p4UHPdvYBiQH5gULJmixCYiIpni7jM4eOdn\n24McMwgYdID2eUCtWMSlxCYiEhbpHKYfdkpsIiIhorymwSMiIhIyR33FtmDB/A35E21VTsdxhCsB\nbMjpICRX09+htJ0QszOpZFNic/eSOR3Dkc7M5h3qWRaRtOjvUHZJ9xyPoaauSBERCZWjvmITEQkT\njYpUYpP02W9uOJEM0t+hbGDoFhuoK1LS4UCTnopkhP4OSXZSxSYiEiYq2ZTYRETCRKMi1RUpIiIh\no4pNssTMqgNlgM/dfXdOxyO5h5mZu2frCyiPBhoVqcQmWdebyIsDk8zsCyU3Sa+UpGZmzYCf3P23\nHA4pFJTX1BUpWXcP8BNwHtAqeJuuyEGZWX0zyxN8PpHIK0z25GxUEiZKbJJhwRtuAXD3ZCL/MK1F\nyU3S527g/SC5rQQ2AbsAzCzOzOJzMLbcLXhtTayW3EqJTTIk+r6ImbU3s1OBosD9wM9EklsLJTfZ\nl5nFAbh7V+BPYDRQkEjFXyDYlgzkyaEQJSR0j00yJCqp3QicQ+QV71cAL7n7A2Z2C9AfSAJm5Fig\nckQJfiBKDj6XdPfeZjYO+JLI35UyZpYEJAJrzew2d9+egyHnYrm41IoRJTbJMDM7AzjN3Vub2YNA\nE+B8M8PdHzKzG4DlORulHEmifiC6DmhkZle5e1czex5oCzwMxBOp/r9TUsscI3d3IcaKEpsc0gGG\nZf8CXGtmfYHGwFnA48DdZpbo7o/nQJhyhDOzc4A+QGd33wrg7lea2dvAfUA3d9cgEsky3WOTNO1z\nT62pmRUDVrr7T0AV4Dl3Xwt8BSwGFuVYsHKkqwyMd/e1ZpaYch/W3XsC64DjczS6kLAYLrmVKjZJ\nU1RSuxIYAHwLTDazkcA3wHAzawCcS+Qn8fU5FqwcMQ7y8PUaoLWZFXb3v4P9ev
H/7d17rNd1Hcfx\n50vUVCBoOrVlhTe8C3k0b+WYIWKJOadNwAvJvOB0aaW5sFZbLZurpcNLamWtImp5zYzUlhqBYiRq\nBZiyyjIFywtesvTVH5/PseNP0IOc+J3f9/d6sN845/f7nu/nc9jhvH+f2/sNj9ievt472VCZikxg\nizVoGaltCexJWUvbGzgEmA7MomzV3hc4yvZDbepuDCItPztHAc8Aq4CfA1OBkyQtpaynzQQmtauv\n0UwJbPEaLb+YzgC2Bnaz/QQwt27bHg+cC1xk+6ft620MNi0bRaZQarGdC5xO2TF7BuVN0ibAZNvL\n29TVRkoS5KyxxWq0vNs+Ebgb2EbSnPr6zcAdlK3Z+V8UryHpPcCHgXHANsDjwFXAvrZn2p4CnGD7\n/vb1sqGyyJbAFv/TN6OIpB7KtNEVtm8AdgBGS5oNYPt64It1FBddTtLImh4LSXsCzwOTKcHtENsH\nAVcCcyQdB2B7Vbv6G82WqcgAXjP9eDSwCyU7xDhJd9teXDeJPCzpatvTerdsR3eTtCEwGjhc0tuB\nLcFDe+wAAAbXSURBVICptp+ru2i/Xy/9B/BVYEF7etodOnigNWAS2AJ41fTjRMpayKGU4HYccISk\nl+u00baStm1fT2MwqW+I/lM3g3wa2B841/Zz9ZINgUMl7UTZJDLO9l/a1N3G6/QcjwMlU5Hxipr3\ncQaw0Pa/bd8HXA8MBaZI2g0gi/0BUEdjE+unoyk5Hy8B9pI0CcD2LOAayhnHIxLUYn3IiK2Lreas\n0XJKlv7tJI2xvdj2vHqQ9mDKIdqIXhsBB0r6LIDt/SVtQdkJOUnSk5Q0WS8Cs3tzRcb/V3ZFJrB1\nrZY1tUmUelhPAmcCFwHH9E4/2v6lpLuSvy8AJG1t+++2H5f0GLArZVSG7ZWSbqT8PH0KGAN8IEFt\nPUpcy1Rkt5N0OqVY6PuAbwJn18dIYJqkXQES1AJA0s7A3yR9TdIU4HLKzscVki6tb5iWA7cAJwH7\n2V7Wxi5HF0pg6zKS3iVpqG3XjCIfoexgmwkcAJwGHEMpHjqEcv4ootcq4NeUKevpwGXACGAu8DQw\nS9LxlDdHT9v+a7s62q1yjC2BratI2gr4BDBD0rCa13EltXqx7X8CZwF71MTG59he2bYOx6Bj+xHK\ngf29KDtnbwOOp2TnvxHYHJgGzLL9Qpu6GV0uga27rAAWUrKof7QeyP4j8IN6Fgng3ZQsI0Mo6yQR\nwKsO8J8HmHJe7VGgB7ifsj77CHCi7d+3pZPxypb/gXh0qmwe6QKSdgQ2sL1U0vcoiYsPA062fZ6k\ny4A7JN1HSWg81fZLbexyDEJ1+rr3192DwFcoQe1s29fV9bfH6sg/2kLZFUkCW+NJ2hxYCqyU9Hng\nJUpS2hHADpJOtT1D0r6UpLRfzjm1WJO6k/ZFSd8FbgcusX1dfW1JWzsXUSWwNZztJySNB26lTD2P\nAeZQNgG8COxR34V/y/a/2tfT6CR19H8eMErSZn0yjUQbic6eQhwoCWxdwPYvJB0KXEwJbFtRDlwf\nSykfshMwG0hgi7WxgFJgNmJQSWDrErZvkfRJStXr/Wx/W9INlOwRm9l+qr09jE5je4mkYzNai8Em\nga2L2L5J0svAAkn7p+RMrKsEtcEnU5EJbF3H9s2SNgZuldSTVEcRzZJdkTnH1pVqkdD3J6hFRBNl\nxNalUr04ooE6/GD1QElgi4hoiE7P8ThQMhUZERGNkhFbRESTZMiWEVt0HkkvSbpX0gOSfiRps3W4\n1zhJP6kfH1Gzaazp2pG1ft3atvG5eoawX8+3XHO1pKPXoq1Rkh5Y2z5GNEkCW3Si522Ptb07JS3Y\naX1fVLHWP9u2b7B9wetcMhJY68AWsT5pAP90qgS26HR3UpI5j5K0VNJ3KNlV3ilpgqT5khbVkd0w\nAEkTJS2RtIg+KaEkTZM0q368laRrJS2ujw
OAC4Dt62jxwnrdOZIWSrqvJpnuvddMScsk/YqSsux1\nSTq53mexpB+3jELHS7qn3u/wev0QSRf2afvUdf2HjGZI2ZoEtuhgtYbcYZRaYAA7Apfa3g14Fjgf\nGG97L+Ae4OOSNgGuBCZRSq5svYbbXwzcbnsMpajm7yh1yB6qo8VzJE2obb4XGAv0SDpIUg8lD+dY\n4IPAPv34dq6xvU9t7w+U6tS9RtU2PgRcXr+H6cBTtvep9z9Z0rb9aCei8bJ5JDrRppLurR/fCXyD\nUjz1T7YX1Of3A3YF5tUSYhsD84GdgeW2HwSo5VdOWU0bBwMnANTadE9JelvLNRPq47f182GUQDcc\nuLY33VTNyflGdpf0Bcp05zBgbp/XflgP0z8o6eH6PUwA9uyz/jaitr2sH21Fg3XwQGvAJLBFJ3re\n9ti+T9Tg9Wzfp4BbbE9uue5VX7eOBHzJ9tdb2jjrTdzrauBI24slTQPG9XnNLde6tn2m7b4BEEmj\n3kTb0SSJbJmKjMZaABwoaQcASUMljQaWUGqIbV+vm7yGr78NmFG/doikEcAzlNFYr7nASX3W7t4h\naUvgDuBISZtKGk6Z9nwjw4FHJW0ETG157RhJG9Q+b0cpHDsXmFGvR9JoSUP70U5E42XEFo1ke0Ud\n+cyW9Jb69Pm2l0k6BbhJ0nOUqczhq7nFx4ArJE2nVB2fYXu+pHl1O/3NdZ1tF2B+HTGuAo6zvUjS\nHGAx8DiwsB9d/gxwF7Ci/t23T38G7gbeCpxm+wVJV1HW3hbVQrErgCP7968TTdbJuxkHikql94iI\n6HQ9PXt73l33DNj9Nt1Iv7G994DdcD1JYIuIaAhJPwO2GMBbrrQ9cQDvt14ksEVERKNk80hERDRK\nAltERDRKAltERDRKAltERDRKAltERDRKAltERDRKAltERDRKAltERDRKAltERDTKfwFfJE/Zxf2V\nxgAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<matplotlib.figure.Figure at 0x7f56681829e8>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "plot(actual_value = df_.loc[:,'Actual'].values.astype(int),\n",
    "     pred_value = df_.loc[:,'Prediction'].values.astype(int))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 34,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:35:27.416690Z",
     "start_time": "2017-07-23T23:35:27.402487Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "no_of_features  hidden_layers\n",
       "1               1                (0.908368939624, 0.942862285696)\n",
       "                3                 (0.985690400269, 1.00773875175)\n",
       "dtype: object"
      ]
     },
     "execution_count": 34,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from scipy import stats\n",
    "\n",
    "def fn(x):\n",
    "    #print(x)\n",
    "    return stats.norm.interval(0.95, loc=x.f1_score.mean(), scale=x.f1_score.std())\n",
    "psg.apply(fn)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "_draft": {
   "nbviewer_url": "https://gist.github.com/7d1ace18a82178e15ece8fc5252fce88"
  },
  "anaconda-cloud": {},
  "gist": {
   "data": {
    "description": "Hyper parameter tuning",
    "public": false
   },
   "id": "7d1ace18a82178e15ece8fc5252fce88"
  },
  "kernelspec": {
   "display_name": "Python [conda env:p3]",
   "language": "python",
   "name": "conda-env-p3-py"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.1"
  },
  "varInspector": {
   "cols": {
    "lenName": 16,
    "lenType": 16,
    "lenVar": 40
   },
   "kernels_config": {
    "python": {
     "delete_cmd_postfix": "",
     "delete_cmd_prefix": "del ",
     "library": "var_list.py",
     "varRefreshCmd": "print(var_dic_list())"
    },
    "r": {
     "delete_cmd_postfix": ") ",
     "delete_cmd_prefix": "rm(",
     "library": "var_list.r",
     "varRefreshCmd": "cat(var_dic_list()) "
    }
   },
   "types_to_exclude": [
    "module",
    "function",
    "builtin_function_or_method",
    "instance",
    "_Feature"
   ],
   "window_display": false
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
