{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Read Data Sample"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:38:32.651592Z",
     "start_time": "2017-07-23T23:38:32.237090Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "import pandas as pd\n",
    "import numpy as np\n",
    "import os\n",
    "import time\n",
    "from collections import namedtuple\n",
    "pd.set_option(\"display.max_rows\",35)\n",
    "%matplotlib inline"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:38:32.679642Z",
     "start_time": "2017-07-23T23:38:32.652931Z"
    }
   },
   "outputs": [],
   "source": [
    "%%bash\n",
    "rm dataset/scores/tf_dense_only_nsl_kdd_scores_all-.pkl"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:38:32.752308Z",
     "start_time": "2017-07-23T23:38:32.681730Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "class dataset:\n",
    "    kdd_train_2labels = pd.read_pickle(\"dataset/kdd_train__2labels.pkl\")\n",
    "    kdd_test_2labels = pd.read_pickle(\"dataset/kdd_test_2labels.pkl\")\n",
    "    kdd_test__2labels = pd.read_pickle(\"dataset/kdd_test__2labels.pkl\")\n",
    "    \n",
    "    kdd_train_5labels = pd.read_pickle(\"dataset/kdd_train_5labels.pkl\")\n",
    "    kdd_test_5labels = pd.read_pickle(\"dataset/kdd_test_5labels.pkl\")\n",
    "    "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:38:32.759364Z",
     "start_time": "2017-07-23T23:38:32.753834Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(25192, 124)"
      ]
     },
     "execution_count": 4,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "dataset.kdd_train_2labels.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:38:32.764726Z",
     "start_time": "2017-07-23T23:38:32.760703Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(22544, 124)"
      ]
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "dataset.kdd_test_2labels.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:38:33.189126Z",
     "start_time": "2017-07-23T23:38:32.766190Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "0.97509982675167528"
      ]
     },
     "execution_count": 6,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from sklearn import model_selection as ms\n",
    "from sklearn import preprocessing as pp\n",
    "\n",
    "class preprocess:\n",
    "    \n",
    "    output_columns_2labels = ['is_Normal','is_Attack']\n",
    "    \n",
    "    x_input = dataset.kdd_train_2labels.drop(output_columns_2labels, axis = 1)\n",
    "    y_output = dataset.kdd_train_2labels.loc[:,output_columns_2labels]\n",
    "\n",
    "    x_test_input = dataset.kdd_test_2labels.drop(output_columns_2labels, axis = 1)\n",
    "    y_test = dataset.kdd_test_2labels.loc[:,output_columns_2labels]\n",
    "    \n",
    "    x_test__input = dataset.kdd_test__2labels.drop(output_columns_2labels, axis = 1)\n",
    "    y_test_ = dataset.kdd_test__2labels.loc[:,output_columns_2labels]\n",
    "\n",
    "    ss = pp.StandardScaler()\n",
    "\n",
    "    x_train = ss.fit_transform(x_input)\n",
    "    x_test = ss.transform(x_test_input)\n",
    "    x_test_ = ss.transform(x_test__input)\n",
    "\n",
    "    y_train = y_output.values\n",
    "    y_test = y_test.values\n",
    "    y_test_ = y_test_.values\n",
    "\n",
    "    \n",
    "preprocess.x_train.std()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:38:35.032545Z",
     "start_time": "2017-07-23T23:38:33.190698Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "import tensorflow as tf\n",
    "from tensorflow.contrib.legacy_seq2seq.python.ops.seq2seq import basic_rnn_seq2seq\n",
    "from tensorflow.contrib.rnn import RNNCell, LSTMCell, MultiRNNCell\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:38:35.294051Z",
     "start_time": "2017-07-23T23:38:35.034196Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "class network(object):\n",
    "    \n",
    "    input_dim = 122\n",
    "    classes = 2\n",
    "    hidden_encoder_dim = 122\n",
    "    hidden_layers = 1\n",
    "    latent_dim = 10\n",
    "\n",
    "    hidden_decoder_dim = 122\n",
    "    lam = 0.01\n",
    "    \n",
    "    def __init__(self, classes, hidden_layers, num_of_features):\n",
    "        self.classes = classes\n",
    "        self.hidden_layers = hidden_layers\n",
    "        self.latent_dim = num_of_features\n",
    "            \n",
    "    def build_layers(self):\n",
    "        tf.reset_default_graph()\n",
    "        #learning_rate = tf.Variable(initial_value=0.001)\n",
    "\n",
    "        input_dim = self.input_dim\n",
    "        classes = self.classes\n",
    "        hidden_encoder_dim = self.hidden_encoder_dim\n",
    "        hidden_layers = self.hidden_layers\n",
    "        latent_dim = self.latent_dim\n",
    "        hidden_decoder_dim = self.hidden_decoder_dim\n",
    "        lam = self.lam\n",
    "        \n",
    "        with tf.variable_scope(\"Input\"):\n",
    "            self.x_input = tf.placeholder(\"float\", shape=[None, 1, input_dim])\n",
    "            self.y_input_ = tf.placeholder(\"float\", shape=[None, 1, classes])\n",
    "            self.keep_prob = tf.placeholder(\"float\")\n",
    "            self.lr = tf.placeholder(\"float\")\n",
    "            self.x_list = tf.unstack(self.x_input, axis= 1)\n",
    "            self.y_list_ = tf.unstack(self.y_input_, axis = 1)\n",
    "            self.y_ = self.y_list_[0]\n",
    "            \n",
    "            #GO = tf.fill((tf.shape(self.x)[0], 1), 0.5)\n",
    "            \n",
    "            #y_with_GO = tf.stack([self.y_, GO])\n",
    "            \n",
    "        with tf.variable_scope(\"lstm\"):\n",
    "            multi_cell = MultiRNNCell([LSTMCell(input_dim) for i in range(hidden_layers)] )\n",
    "            \n",
    "            self.y, states = basic_rnn_seq2seq(self.x_list, self.y_list_, multi_cell)\n",
    "            #self.y = tf.slice(self.y, [0, 0], [-1,2])\n",
    "            \n",
    "            #self.out = tf.squeeze(self.y)\n",
    "            \n",
    "            #self.y = tf.layers.dense(self.y[0], classes, activation = None)\n",
    "            \n",
    "            self.y = tf.slice(self.y[0], [0, 0], [-1,2])\n",
    "            \n",
    "        with tf.variable_scope(\"Loss\"):\n",
    "            \n",
    "            self.regularized_loss = tf.losses.mean_squared_error(self.y_, self.y)\n",
    "            correct_prediction = tf.equal(tf.argmax(self.y_, 1), tf.argmax(self.y, 1))\n",
    "            self.tf_accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32), name = \"Accuracy\")\n",
    "\n",
    "        with tf.variable_scope(\"Optimizer\"):\n",
    "            learning_rate=self.lr\n",
    "            optimizer = tf.train.AdamOptimizer(learning_rate)\n",
    "            gradients, variables = zip(*optimizer.compute_gradients(self.regularized_loss))\n",
    "            gradients = [\n",
    "                None if gradient is None else tf.clip_by_value(gradient, -1, 1)\n",
    "                for gradient in gradients]\n",
    "            self.train_op = optimizer.apply_gradients(zip(gradients, variables))\n",
    "            #self.train_op = optimizer.minimize(self.regularized_loss)\n",
    "            \n",
    "        # add op for merging summary\n",
    "        #self.summary_op = tf.summary.merge_all()\n",
    "        self.pred = tf.argmax(self.y, axis = 1)\n",
    "        self.actual = tf.argmax(self.y_, axis = 1)\n",
    "\n",
    "        # add Saver ops\n",
    "        self.saver = tf.train.Saver()\n",
    "        "
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-06-01T00:59:00.684124Z",
     "start_time": "2017-06-01T00:58:59.843181Z"
    }
   },
   "source": [
    "batch_iterations = 200\n",
    "\n",
    "x_train, x_valid, y_train, y_valid, = ms.train_test_split(preprocess.x_train, \n",
    "                                                                          preprocess.y_train, \n",
    "                                                                          test_size=0.1)\n",
    "batch_indices = np.array_split(np.arange(x_train.shape[0]), \n",
    "                                           batch_iterations)\n",
    "                                                                          \n",
    "for i in batch_indices:\n",
    "    print(x_train[i,np.newaxis,:])\n",
    "    print(y_train[i,np.newaxis,:])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:38:35.635236Z",
     "start_time": "2017-07-23T23:38:35.295617Z"
    },
    "collapsed": true,
    "scrolled": false
   },
   "outputs": [],
   "source": [
    "import collections\n",
    "import sklearn.metrics as me\n",
    "\n",
    "class Train:    \n",
    "    \n",
    "    result = namedtuple(\"score\", ['epoch', 'no_of_features','hidden_layers','train_score', 'test_score', 'f1_score', 'test_score_20', 'f1_score_20', 'time_taken'])\n",
    "\n",
    "    predictions = {}\n",
    "    predictions_ = {}\n",
    "\n",
    "    results = []\n",
    "    best_acc = 0\n",
    "    best_acc_global = 0\n",
    "\n",
    "    def train(epochs, net, h,f, lrs):\n",
    "        batch_iterations = 200\n",
    "        train_loss = None\n",
    "        Train.best_acc = 0\n",
    "        os.makedirs(\"dataset/tf_lstm_nsl_kdd-orig-/hidden layers_{}_features count_{}\".format(h,f),\n",
    "                    exist_ok = True)\n",
    "        with tf.Session() as sess:\n",
    "            #summary_writer_train = tf.summary.FileWriter('./logs/kdd/VAE/training', graph=sess.graph)\n",
    "            #summary_writer_valid = tf.summary.FileWriter('./logs/kdd/VAE/validation')\n",
    "\n",
    "            sess.run(tf.global_variables_initializer())\n",
    "            start_time = time.perf_counter()\n",
    "            \n",
    "            accuracy, pred_value, actual_value, y_pred = sess.run([net.tf_accuracy, \n",
    "                                                                   net.pred, \n",
    "                                                                   net.actual, net.y], \n",
    "                                                                  feed_dict={net.x_input: preprocess.x_test[:,np.newaxis,:], \n",
    "                                                                             net.y_input_: preprocess.y_test[:,np.newaxis,:], \n",
    "                                                                             net.keep_prob:1})\n",
    "            \n",
    "            print(\"Initial Accuracy, before training: {}\".format(accuracy))\n",
    "            \n",
    "            for c, lr in enumerate(lrs):\n",
    "                for epoch in range(1, (epochs+1)):\n",
    "                    x_train, x_valid, y_train, y_valid, = ms.train_test_split(preprocess.x_train, \n",
    "                                                                              preprocess.y_train, \n",
    "                                                                              test_size=0.1)\n",
    "                    batch_indices = np.array_split(np.arange(x_train.shape[0]), \n",
    "                                               batch_iterations)\n",
    "\n",
    "                    for i in batch_indices:\n",
    "\n",
    "                        _, train_loss = sess.run([net.train_op, net.regularized_loss], #net.summary_op\n",
    "                                                              feed_dict={net.x_input: x_train[i,np.newaxis,:], \n",
    "                                                                         net.y_input_: y_train[i,np.newaxis,:], \n",
    "                                                                         net.keep_prob:1, net.lr:lr})\n",
    "                        #summary_writer_train.add_summary(summary_str, epoch)\n",
    "                        if(train_loss > 1e9):\n",
    "                            print(\"Step {} | Training Loss: {:.6f}\".format(epoch, train_loss))\n",
    "\n",
    "\n",
    "                    valid_accuracy,valid_loss = sess.run([net.tf_accuracy, net.regularized_loss], #net.summary_op \n",
    "                                                          feed_dict={net.x_input: x_valid[:,np.newaxis,:], \n",
    "                                                                     net.y_input_: y_valid[:,np.newaxis,:], \n",
    "                                                                     net.keep_prob:1, net.lr:lr})\n",
    "                    #summary_writer_valid.add_summary(summary_str, epoch)\n",
    "\n",
    "\n",
    "\n",
    "                    accuracy, pred_value, actual_value, y_pred = sess.run([net.tf_accuracy, \n",
    "                                                                   net.pred, \n",
    "                                                                   net.actual, net.y], \n",
    "                                                                  feed_dict={net.x_input: preprocess.x_test[:,np.newaxis,:], \n",
    "                                                                             net.y_input_: preprocess.y_test[:,np.newaxis,:], \n",
    "                                                                             net.keep_prob:1, net.lr:lr})\n",
    "                    f1_score = me.f1_score(actual_value, pred_value)\n",
    "                    accuracy_, pred_value_, actual_value_, y_pred_ = sess.run([net.tf_accuracy, \n",
    "                                                                   net.pred, \n",
    "                                                                   net.actual, net.y], \n",
    "                                                                  feed_dict={net.x_input: preprocess.x_test_[:,np.newaxis,:], \n",
    "                                                                             net.y_input_: preprocess.y_test_[:,np.newaxis,:], \n",
    "                                                                             net.keep_prob:1, net.lr:lr})\n",
    "                    f1_score_ = me.f1_score(actual_value_, pred_value_)\n",
    "                    print(\"Step {} | Training Loss: {:.6f} | Train Accuracy: {:.6f} | Test Accuracy: {:.6f}, {:.6f}\".format(epoch, train_loss, valid_accuracy, accuracy, accuracy_))\n",
    "\n",
    "                    if accuracy > Train.best_acc_global:\n",
    "                                Train.best_acc_global = accuracy\n",
    "                                Train.pred_value = pred_value\n",
    "                                Train.actual_value = actual_value\n",
    "                                Train.pred_value_ = pred_value_\n",
    "                                Train.actual_value_ = actual_value_\n",
    "                                Train.best_parameters = \"Hidden Layers:{}, Features Count:{}\".format(h, f)\n",
    "\n",
    "                    if accuracy > Train.best_acc:\n",
    "\n",
    "                        #net.saver.save(sess, \"dataset/tf_vae_only_nsl_kdd_hidden layers_{}_features count_{}\".format(epochs,h,f))\n",
    "                        #Train.results.append(Train.result(epochs, f, h,valid_accuracy, accuracy))\n",
    "                        #curr_pred = pd.DataFrame({\"Attack_prob\":y_pred[:,-2], \"Normal_prob\":y_pred[:, -1]})\n",
    "                        #Train.predictions.update({\"{}_{}_{}\".format(epochs,f,h):curr_pred})\n",
    "\n",
    "                        Train.best_acc = accuracy\n",
    "                        if not (np.isnan(train_loss)):\n",
    "                            net.saver.save(sess, \n",
    "                                       \"dataset/tf_lstm_nsl_kdd-orig-/hidden layers_{}_features count_{}/model\"\n",
    "                                       .format(h,f), \n",
    "                                       global_step = epoch, \n",
    "                                       write_meta_graph=False)\n",
    "\n",
    "                        curr_pred = pd.DataFrame({\"Attack_prob\":y_pred[:,-2], \"Normal_prob\":y_pred[:, -1], \"Prediction\":pred_value, \"Actual\":actual_value})\n",
    "                        curr_pred_ = pd.DataFrame({\"Attack_prob\":y_pred_[:,-2], \"Normal_prob\":y_pred_[:, -1], \"Prediction\":pred_value_, \"Actual\": actual_value_})\n",
    "                        Train.predictions.update({\"{}_{}_{}\".format((epochs+1)* (c+1),f,h):\n",
    "                                                  (curr_pred,\n",
    "                                                   Train.result((epochs+1)*(c+1), f, h,valid_accuracy, accuracy, f1_score, accuracy_, f1_score_, time.perf_counter() - start_time))})\n",
    "                        Train.predictions_.update({\"{}_{}_{}\".format((epochs+1)* (c+1),f,h):\n",
    "                                                  (curr_pred_,\n",
    "                                                   Train.result((epochs+1)*(c+1), f, h,valid_accuracy, accuracy, f1_score, accuracy_, f1_score_, time.perf_counter() - start_time))})\n",
    "\n",
    "\n",
    "\n",
    "            "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:38:35.715841Z",
     "start_time": "2017-07-23T23:38:35.636796Z"
    },
    "collapsed": true,
    "scrolled": false
   },
   "outputs": [],
   "source": [
    "import itertools\n",
    "\n",
    "df_results = []\n",
    "past_scores = []\n",
    "\n",
    "class Hyperparameters:\n",
    "#    features_arr = [2, 4, 8, 16, 32, 64, 128, 256]\n",
    "#    hidden_layers_arr = [2, 4, 6, 10]\n",
    "\n",
    "    def start_training():\n",
    "\n",
    "        global df_results\n",
    "        global past_scores\n",
    "        \n",
    "        Train.predictions = {}\n",
    "        Train.results = []\n",
    "        \n",
    "        features_arr = [1] #[4, 8, 16, 32]\n",
    "        hidden_layers_arr = [1, 3]\n",
    "\n",
    "        epochs = [10]\n",
    "        lrs = [1e-2, 1e-3]\n",
    "\n",
    "        for e, h, f in itertools.product(epochs, hidden_layers_arr, features_arr):\n",
    "            print(\"Current Layer Attributes - epochs:{} hidden layers:{} features count:{}\".format(e,h,f))\n",
    "            n = network(2,h,f)\n",
    "            n.build_layers()\n",
    "            Train.train(e, n, h,f, lrs)\n",
    "            \n",
    "        dict1 = {}\n",
    "        dict1_ = {}\n",
    "        dict2 = []\n",
    "        for k, (v1, v2) in Train.predictions.items():\n",
    "            dict1.update({k: v1})\n",
    "            dict2.append(v2)\n",
    "\n",
    "        for k, (v1_, v2) in Train.predictions.items():\n",
    "            dict1_.update({k: v1_})\n",
    "\n",
    "            \n",
    "        Train.predictions = dict1\n",
    "        Train.predictions_ = dict1_\n",
    "\n",
    "        Train.results = dict2\n",
    "        df_results = pd.DataFrame(Train.results)\n",
    "        temp = df_results.set_index(['no_of_features', 'hidden_layers'])\n",
    "\n",
    "        if not os.path.isfile('dataset/scores/tf_lstm_nsl_kdd-orig_all-.pkl'):\n",
    "            past_scores = temp\n",
    "        else:\n",
    "            past_scores = pd.read_pickle(\"dataset/scores/tf_lstm_nsl_kdd-orig_all-.pkl\")\n",
    "\n",
    "        past_scores.append(temp).to_pickle(\"dataset/scores/tf_lstm_nsl_kdd-orig_all-.pkl\")\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:40:01.118166Z",
     "start_time": "2017-07-23T23:38:35.717482Z"
    },
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Current Layer Attributes - epochs:10 hidden layers:1 features count:1\n",
      "Initial Accuracy, before training: 0.631076991558075\n",
      "Step 1 | Training Loss: 0.017782 | Train Accuracy: 0.984921 | Test Accuracy: 0.778300, 0.579325\n",
      "Step 2 | Training Loss: 0.002807 | Train Accuracy: 0.998413 | Test Accuracy: 0.811746, 0.641857\n",
      "Step 3 | Training Loss: 0.002106 | Train Accuracy: 0.998016 | Test Accuracy: 0.834634, 0.685401\n",
      "Step 4 | Training Loss: 0.002415 | Train Accuracy: 0.999206 | Test Accuracy: 0.877972, 0.767848\n",
      "Step 5 | Training Loss: 0.001275 | Train Accuracy: 0.998810 | Test Accuracy: 0.888751, 0.788354\n",
      "Step 6 | Training Loss: 0.001042 | Train Accuracy: 0.999603 | Test Accuracy: 0.881964, 0.775443\n",
      "Step 7 | Training Loss: 0.001309 | Train Accuracy: 0.998413 | Test Accuracy: 0.897933, 0.805823\n",
      "Step 8 | Training Loss: 0.001469 | Train Accuracy: 0.999206 | Test Accuracy: 0.893630, 0.797637\n",
      "Step 9 | Training Loss: 0.001851 | Train Accuracy: 0.999603 | Test Accuracy: 0.897933, 0.805823\n",
      "Step 10 | Training Loss: 0.001222 | Train Accuracy: 0.999603 | Test Accuracy: 0.897889, 0.805738\n",
      "Step 1 | Training Loss: 0.002581 | Train Accuracy: 0.999603 | Test Accuracy: 0.896957, 0.803966\n",
      "Step 2 | Training Loss: 0.002195 | Train Accuracy: 0.999603 | Test Accuracy: 0.896469, 0.803038\n",
      "Step 3 | Training Loss: 0.001027 | Train Accuracy: 0.999603 | Test Accuracy: 0.895538, 0.801266\n",
      "Step 4 | Training Loss: 0.001704 | Train Accuracy: 0.999603 | Test Accuracy: 0.895316, 0.800844\n",
      "Step 5 | Training Loss: 0.001718 | Train Accuracy: 1.000000 | Test Accuracy: 0.894251, 0.798819\n",
      "Step 6 | Training Loss: 0.001270 | Train Accuracy: 0.999603 | Test Accuracy: 0.894340, 0.798987\n",
      "Step 7 | Training Loss: 0.002221 | Train Accuracy: 0.999603 | Test Accuracy: 0.893941, 0.798228\n",
      "Step 8 | Training Loss: 0.001197 | Train Accuracy: 0.999603 | Test Accuracy: 0.893852, 0.798059\n",
      "Step 9 | Training Loss: 0.010017 | Train Accuracy: 0.999603 | Test Accuracy: 0.893763, 0.797890\n",
      "Step 10 | Training Loss: 0.006140 | Train Accuracy: 0.999603 | Test Accuracy: 0.893896, 0.798143\n",
      "Current Layer Attributes - epochs:10 hidden layers:3 features count:1\n",
      "Initial Accuracy, before training: 0.41221609711647034\n",
      "Step 1 | Training Loss: 0.000778 | Train Accuracy: 0.998810 | Test Accuracy: 0.973607, 0.949789\n",
      "Step 2 | Training Loss: 0.000664 | Train Accuracy: 0.998810 | Test Accuracy: 0.977511, 0.957215\n",
      "Step 3 | Training Loss: 0.000653 | Train Accuracy: 1.000000 | Test Accuracy: 0.978708, 0.959494\n",
      "Step 4 | Training Loss: 0.000654 | Train Accuracy: 0.999206 | Test Accuracy: 0.969837, 0.942616\n",
      "Step 5 | Training Loss: 0.000650 | Train Accuracy: 1.000000 | Test Accuracy: 0.998226, 0.996624\n",
      "Step 6 | Training Loss: 0.000651 | Train Accuracy: 0.999603 | Test Accuracy: 0.998669, 0.997468\n",
      "Step 7 | Training Loss: 0.000649 | Train Accuracy: 1.000000 | Test Accuracy: 0.983410, 0.968439\n",
      "Step 8 | Training Loss: 0.000649 | Train Accuracy: 1.000000 | Test Accuracy: 0.983499, 0.968608\n",
      "Step 9 | Training Loss: 0.000651 | Train Accuracy: 1.000000 | Test Accuracy: 0.999157, 0.998397\n",
      "Step 10 | Training Loss: 0.000648 | Train Accuracy: 1.000000 | Test Accuracy: 0.999113, 0.998312\n",
      "Step 1 | Training Loss: 0.000648 | Train Accuracy: 1.000000 | Test Accuracy: 0.999113, 0.998312\n",
      "Step 2 | Training Loss: 0.000648 | Train Accuracy: 1.000000 | Test Accuracy: 0.999113, 0.998312\n",
      "Step 3 | Training Loss: 0.000648 | Train Accuracy: 1.000000 | Test Accuracy: 0.999113, 0.998312\n",
      "Step 4 | Training Loss: 0.000648 | Train Accuracy: 1.000000 | Test Accuracy: 0.999113, 0.998312\n",
      "Step 5 | Training Loss: 0.000648 | Train Accuracy: 1.000000 | Test Accuracy: 0.999113, 0.998312\n",
      "Step 6 | Training Loss: 0.000648 | Train Accuracy: 1.000000 | Test Accuracy: 0.999113, 0.998312\n",
      "Step 7 | Training Loss: 0.000648 | Train Accuracy: 1.000000 | Test Accuracy: 0.999113, 0.998312\n",
      "Step 8 | Training Loss: 0.000648 | Train Accuracy: 1.000000 | Test Accuracy: 0.999113, 0.998312\n",
      "Step 9 | Training Loss: 0.000647 | Train Accuracy: 1.000000 | Test Accuracy: 0.999113, 0.998312\n",
      "Step 10 | Training Loss: 0.000648 | Train Accuracy: 1.000000 | Test Accuracy: 0.999113, 0.998312\n"
     ]
    }
   ],
   "source": [
    "#%%timeit -r 10\n",
    "\n",
    "Hyperparameters.start_training()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:40:01.130957Z",
     "start_time": "2017-07-23T23:40:01.119836Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "pd.Panel(Train.predictions).to_pickle(\"dataset/tf_lstm_nsl_kdd_predictions-.pkl\")\n",
    "pd.Panel(Train.predictions_).to_pickle(\"dataset/tf_lstm_nsl_kdd_predictions-__.pkl\")\n",
    "\n",
    "#df_results.to_pickle(\"dataset/tf_lstm_nsl_kdd_scores-.pkl\")\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:40:01.208845Z",
     "start_time": "2017-07-23T23:40:01.132745Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "import itertools\n",
    "\n",
    "def plot_confusion_matrix(cm, classes,\n",
    "                          normalize=False,\n",
    "                          title='Confusion matrix',\n",
    "                          cmap=plt.cm.Blues):\n",
    "    \"\"\"\n",
    "    This function prints and plots the confusion matrix.\n",
    "    Normalization can be applied by setting `normalize=True`.\n",
    "    \"\"\"\n",
    "    np.set_printoptions(precision=4)\n",
    "\n",
    "    plt.imshow(cm, interpolation='nearest', cmap=cmap)\n",
    "    plt.title(title)\n",
    "    plt.colorbar()\n",
    "    tick_marks = np.arange(len(classes))\n",
    "    plt.xticks(tick_marks, classes, rotation=45)\n",
    "    plt.yticks(tick_marks, classes)\n",
    "\n",
    "    if normalize:\n",
    "        cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]\n",
    "        #print(\"Normalized confusion matrix\")\n",
    "    else:\n",
    "        #print('Confusion matrix, without normalization')\n",
    "        pass\n",
    "    \n",
    "    #print(cm)\n",
    "\n",
    "    label = [[\"\\n True Negative\", \"\\n False Positive \\n Type II Error\"],\n",
    "             [\"\\n False Negative \\n Type I Error\", \"\\n True Positive\"]\n",
    "            ]\n",
    "    \n",
    "    thresh = cm.max() / 2.\n",
    "    for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):\n",
    "        \n",
    "        plt.text(j, i, \"{} {}\".format(cm[i, j].round(4), label[i][j]),\n",
    "                 horizontalalignment=\"center\",\n",
    "                 color=\"white\" if cm[i, j] > thresh else \"black\")\n",
    "\n",
    "    plt.tight_layout()\n",
    "    plt.ylabel('True label')\n",
    "    plt.xlabel('Predicted label')\n",
    "\n",
    "def plot(actual_value, pred_value):\n",
    "    from sklearn.metrics import confusion_matrix\n",
    "\n",
    "    cm_2labels = confusion_matrix(y_pred = pred_value, y_true = actual_value)\n",
    "    plt.figure(figsize=[6,6])\n",
    "    plot_confusion_matrix(cm_2labels, ['Normal', 'Attack'], normalize = False)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:40:01.215750Z",
     "start_time": "2017-07-23T23:40:01.210729Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "past_scores = pd.read_pickle(\"dataset/scores/tf_lstm_nsl_kdd-orig_all-.pkl\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:40:01.239364Z",
     "start_time": "2017-07-23T23:40:01.217393Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th>epoch</th>\n",
       "      <th>train_score</th>\n",
       "      <th>test_score</th>\n",
       "      <th>f1_score</th>\n",
       "      <th>test_score_20</th>\n",
       "      <th>f1_score_20</th>\n",
       "      <th>time_taken</th>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>no_of_features</th>\n",
       "      <th>hidden_layers</th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th rowspan=\"27\" valign=\"top\">1</th>\n",
       "      <th>3</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>15.237297</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.999734</td>\n",
       "      <td>0.999766</td>\n",
       "      <td>0.999494</td>\n",
       "      <td>0.999691</td>\n",
       "      <td>6.295392</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.999734</td>\n",
       "      <td>0.999766</td>\n",
       "      <td>0.999494</td>\n",
       "      <td>0.999691</td>\n",
       "      <td>12.354382</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>11</td>\n",
       "      <td>0.998016</td>\n",
       "      <td>0.999379</td>\n",
       "      <td>0.999455</td>\n",
       "      <td>0.999156</td>\n",
       "      <td>0.999485</td>\n",
       "      <td>11.837164</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.999290</td>\n",
       "      <td>0.999377</td>\n",
       "      <td>0.998650</td>\n",
       "      <td>0.999175</td>\n",
       "      <td>11.787666</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.999157</td>\n",
       "      <td>0.999260</td>\n",
       "      <td>0.998397</td>\n",
       "      <td>0.999021</td>\n",
       "      <td>27.536535</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.997072</td>\n",
       "      <td>0.997422</td>\n",
       "      <td>0.994430</td>\n",
       "      <td>0.996586</td>\n",
       "      <td>11.624922</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>22</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.994633</td>\n",
       "      <td>0.995302</td>\n",
       "      <td>0.989789</td>\n",
       "      <td>0.993791</td>\n",
       "      <td>38.527330</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.993701</td>\n",
       "      <td>0.994492</td>\n",
       "      <td>0.988017</td>\n",
       "      <td>0.992722</td>\n",
       "      <td>27.259746</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>11</td>\n",
       "      <td>0.998810</td>\n",
       "      <td>0.991084</td>\n",
       "      <td>0.992140</td>\n",
       "      <td>0.983122</td>\n",
       "      <td>0.989639</td>\n",
       "      <td>8.618293</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.986914</td>\n",
       "      <td>0.988427</td>\n",
       "      <td>0.975105</td>\n",
       "      <td>0.984652</td>\n",
       "      <td>9.510270</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>11</td>\n",
       "      <td>0.999206</td>\n",
       "      <td>0.980704</td>\n",
       "      <td>0.983254</td>\n",
       "      <td>0.963291</td>\n",
       "      <td>0.977927</td>\n",
       "      <td>8.956038</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>11</td>\n",
       "      <td>0.999206</td>\n",
       "      <td>0.980704</td>\n",
       "      <td>0.983254</td>\n",
       "      <td>0.963291</td>\n",
       "      <td>0.977927</td>\n",
       "      <td>8.956038</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.976579</td>\n",
       "      <td>0.979063</td>\n",
       "      <td>0.955443</td>\n",
       "      <td>0.972134</td>\n",
       "      <td>6.629817</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.918559</td>\n",
       "      <td>0.924326</td>\n",
       "      <td>0.845063</td>\n",
       "      <td>0.897955</td>\n",
       "      <td>11.249477</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.906450</td>\n",
       "      <td>0.914591</td>\n",
       "      <td>0.822025</td>\n",
       "      <td>0.885524</td>\n",
       "      <td>7.625335</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.903699</td>\n",
       "      <td>0.912080</td>\n",
       "      <td>0.816793</td>\n",
       "      <td>0.882158</td>\n",
       "      <td>13.398739</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.899707</td>\n",
       "      <td>0.908250</td>\n",
       "      <td>0.809198</td>\n",
       "      <td>0.876939</td>\n",
       "      <td>6.418205</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.899574</td>\n",
       "      <td>0.908087</td>\n",
       "      <td>0.808945</td>\n",
       "      <td>0.876702</td>\n",
       "      <td>9.361751</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.899175</td>\n",
       "      <td>0.907590</td>\n",
       "      <td>0.808186</td>\n",
       "      <td>0.875975</td>\n",
       "      <td>8.658957</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.899175</td>\n",
       "      <td>0.907590</td>\n",
       "      <td>0.808186</td>\n",
       "      <td>0.875975</td>\n",
       "      <td>8.658957</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.898776</td>\n",
       "      <td>0.907130</td>\n",
       "      <td>0.807426</td>\n",
       "      <td>0.875314</td>\n",
       "      <td>11.615757</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>11</td>\n",
       "      <td>0.998413</td>\n",
       "      <td>0.897933</td>\n",
       "      <td>0.906300</td>\n",
       "      <td>0.805823</td>\n",
       "      <td>0.874173</td>\n",
       "      <td>7.838056</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.897844</td>\n",
       "      <td>0.906195</td>\n",
       "      <td>0.805654</td>\n",
       "      <td>0.874022</td>\n",
       "      <td>9.943235</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.897667</td>\n",
       "      <td>0.905940</td>\n",
       "      <td>0.805316</td>\n",
       "      <td>0.873638</td>\n",
       "      <td>10.647751</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.893586</td>\n",
       "      <td>0.902110</td>\n",
       "      <td>0.797553</td>\n",
       "      <td>0.868454</td>\n",
       "      <td>8.407638</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>11</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.891501</td>\n",
       "      <td>0.899655</td>\n",
       "      <td>0.793586</td>\n",
       "      <td>0.864907</td>\n",
       "      <td>11.260245</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "                              epoch  train_score  test_score  f1_score  \\\n",
       "no_of_features hidden_layers                                             \n",
       "1              3                 11     1.000000    1.000000  1.000000   \n",
       "               3                 11     1.000000    0.999734  0.999766   \n",
       "               3                 11     1.000000    0.999734  0.999766   \n",
       "               3                 11     0.998016    0.999379  0.999455   \n",
       "               3                 11     1.000000    0.999290  0.999377   \n",
       "               3                 11     1.000000    0.999157  0.999260   \n",
       "               3                 11     1.000000    0.997072  0.997422   \n",
       "               3                 22     1.000000    0.994633  0.995302   \n",
       "               3                 11     1.000000    0.993701  0.994492   \n",
       "               3                 11     0.998810    0.991084  0.992140   \n",
       "               3                 11     1.000000    0.986914  0.988427   \n",
       "               3                 11     0.999206    0.980704  0.983254   \n",
       "               3                 11     0.999206    0.980704  0.983254   \n",
       "               3                 11     1.000000    0.976579  0.979063   \n",
       "               1                 11     1.000000    0.918559  0.924326   \n",
       "               1                 11     1.000000    0.906450  0.914591   \n",
       "               1                 11     1.000000    0.903699  0.912080   \n",
       "               1                 11     1.000000    0.899707  0.908250   \n",
       "               1                 11     1.000000    0.899574  0.908087   \n",
       "               1                 11     1.000000    0.899175  0.907590   \n",
       "               1                 11     1.000000    0.899175  0.907590   \n",
       "               1                 11     1.000000    0.898776  0.907130   \n",
       "               1                 11     0.998413    0.897933  0.906300   \n",
       "               1                 11     1.000000    0.897844  0.906195   \n",
       "               1                 11     1.000000    0.897667  0.905940   \n",
       "               1                 11     1.000000    0.893586  0.902110   \n",
       "               1                 11     1.000000    0.891501  0.899655   \n",
       "\n",
       "                              test_score_20  f1_score_20  time_taken  \n",
       "no_of_features hidden_layers                                          \n",
       "1              3                   1.000000     1.000000   15.237297  \n",
       "               3                   0.999494     0.999691    6.295392  \n",
       "               3                   0.999494     0.999691   12.354382  \n",
       "               3                   0.999156     0.999485   11.837164  \n",
       "               3                   0.998650     0.999175   11.787666  \n",
       "               3                   0.998397     0.999021   27.536535  \n",
       "               3                   0.994430     0.996586   11.624922  \n",
       "               3                   0.989789     0.993791   38.527330  \n",
       "               3                   0.988017     0.992722   27.259746  \n",
       "               3                   0.983122     0.989639    8.618293  \n",
       "               3                   0.975105     0.984652    9.510270  \n",
       "               3                   0.963291     0.977927    8.956038  \n",
       "               3                   0.963291     0.977927    8.956038  \n",
       "               3                   0.955443     0.972134    6.629817  \n",
       "               1                   0.845063     0.897955   11.249477  \n",
       "               1                   0.822025     0.885524    7.625335  \n",
       "               1                   0.816793     0.882158   13.398739  \n",
       "               1                   0.809198     0.876939    6.418205  \n",
       "               1                   0.808945     0.876702    9.361751  \n",
       "               1                   0.808186     0.875975    8.658957  \n",
       "               1                   0.808186     0.875975    8.658957  \n",
       "               1                   0.807426     0.875314   11.615757  \n",
       "               1                   0.805823     0.874173    7.838056  \n",
       "               1                   0.805654     0.874022    9.943235  \n",
       "               1                   0.805316     0.873638   10.647751  \n",
       "               1                   0.797553     0.868454    8.407638  \n",
       "               1                   0.793586     0.864907   11.260245  "
      ]
     },
     "execution_count": 15,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Rank every recorded run by F1 score, best first.\n",
    "past_scores.sort_values(by='f1_score', ascending=False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:40:01.259116Z",
     "start_time": "2017-07-23T23:40:01.241093Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th>epoch</th>\n",
       "      <th>train_score</th>\n",
       "      <th>test_score</th>\n",
       "      <th>f1_score</th>\n",
       "      <th>test_score_20</th>\n",
       "      <th>f1_score_20</th>\n",
       "      <th>time_taken</th>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>no_of_features</th>\n",
       "      <th>hidden_layers</th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th rowspan=\"2\" valign=\"top\">1</th>\n",
       "      <th>3</th>\n",
       "      <td>11</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>15.237297</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>11</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.918559</td>\n",
       "      <td>0.924326</td>\n",
       "      <td>0.845063</td>\n",
       "      <td>0.897955</td>\n",
       "      <td>11.249477</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "                              epoch  train_score  test_score  f1_score  \\\n",
       "no_of_features hidden_layers                                             \n",
       "1              3                 11          1.0    1.000000  1.000000   \n",
       "               1                 11          1.0    0.918559  0.924326   \n",
       "\n",
       "                              test_score_20  f1_score_20  time_taken  \n",
       "no_of_features hidden_layers                                          \n",
       "1              3                   1.000000     1.000000   15.237297  \n",
       "               1                   0.845063     0.897955   11.249477  "
      ]
     },
     "execution_count": 16,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Group runs by configuration; because the frame is pre-sorted by test_score,\n",
    "# `first()` picks each configuration's best-scoring run.\n",
    "config_keys = ['no_of_features', 'hidden_layers']\n",
    "psg = past_scores.sort_values(by='test_score', ascending=False).groupby(by=config_keys)\n",
    "psg.first().sort_values(by='test_score', ascending=False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:40:01.276372Z",
     "start_time": "2017-07-23T23:40:01.260788Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th>epoch</th>\n",
       "      <th>train_score</th>\n",
       "      <th>test_score</th>\n",
       "      <th>f1_score</th>\n",
       "      <th>test_score_20</th>\n",
       "      <th>f1_score_20</th>\n",
       "      <th>time_taken</th>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>no_of_features</th>\n",
       "      <th>hidden_layers</th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th rowspan=\"2\" valign=\"top\">1</th>\n",
       "      <th>3</th>\n",
       "      <td>11.785714</td>\n",
       "      <td>0.999660</td>\n",
       "      <td>0.992763</td>\n",
       "      <td>0.993641</td>\n",
       "      <td>0.986263</td>\n",
       "      <td>0.991603</td>\n",
       "      <td>14.652206</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>11.000000</td>\n",
       "      <td>0.999878</td>\n",
       "      <td>0.900280</td>\n",
       "      <td>0.908450</td>\n",
       "      <td>0.810289</td>\n",
       "      <td>0.877057</td>\n",
       "      <td>9.621854</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "                                  epoch  train_score  test_score  f1_score  \\\n",
       "no_of_features hidden_layers                                                 \n",
       "1              3              11.785714     0.999660    0.992763  0.993641   \n",
       "               1              11.000000     0.999878    0.900280  0.908450   \n",
       "\n",
       "                              test_score_20  f1_score_20  time_taken  \n",
       "no_of_features hidden_layers                                          \n",
       "1              3                   0.986263     0.991603   14.652206  \n",
       "               1                   0.810289     0.877057    9.621854  "
      ]
     },
     "execution_count": 17,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Average metrics per configuration, ranked by mean test score.\n",
    "psg.mean().sort_values(by='test_score', ascending=False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:40:01.282824Z",
     "start_time": "2017-07-23T23:40:01.277800Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# NOTE: unpickling runs arbitrary code — only load these locally produced files.\n",
    "Train.predictions = pd.read_pickle(\"dataset/tf_lstm_nsl_kdd_predictions-.pkl\")\n",
    "Train.predictions_ = pd.read_pickle(\"dataset/tf_lstm_nsl_kdd_predictions-__.pkl\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:40:46.096042Z",
     "start_time": "2017-07-23T23:40:46.085692Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>Actual</th>\n",
       "      <th>Attack_prob</th>\n",
       "      <th>Normal_prob</th>\n",
       "      <th>Prediction</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>12621</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.964016</td>\n",
       "      <td>0.00015</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "       Actual  Attack_prob  Normal_prob  Prediction\n",
       "12621     0.0     0.964016      0.00015         0.0"
      ]
     },
     "execution_count": 20,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Column key format: \"<epoch>_<no_of_features>_<hidden_layers>\"; peek at one row.\n",
    "Train.predictions[\"11_1_3\"].sample()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:40:50.506416Z",
     "start_time": "2017-07-23T23:40:50.496201Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>Actual</th>\n",
       "      <th>Attack_prob</th>\n",
       "      <th>Normal_prob</th>\n",
       "      <th>Prediction</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>4349</th>\n",
       "      <td>0.0</td>\n",
       "      <td>0.964014</td>\n",
       "      <td>0.000171</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "      Actual  Attack_prob  Normal_prob  Prediction\n",
       "4349     0.0     0.964014     0.000171         0.0"
      ]
     },
     "execution_count": 21,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Same configuration, from the second prediction set.\n",
    "Train.predictions_[\"11_1_3\"].sample()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:40:59.766547Z",
     "start_time": "2017-07-23T23:40:59.759434Z"
    }
   },
   "outputs": [],
   "source": [
    "# Drop rows with missing values so the metric functions see clean arrays.\n",
    "df = Train.predictions[\"11_1_3\"].dropna()\n",
    "df_ = Train.predictions_[\"11_1_3\"].dropna()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:41:02.325010Z",
     "start_time": "2017-07-23T23:41:02.318376Z"
    },
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "from sklearn import metrics as me\n",
    "def get_score(y_true, y_pred):\n",
    "    \"\"\"Compute F1, precision, recall and accuracy for binary predictions.\n",
    "\n",
    "    Returns a dict keyed by metric name, ready to become one DataFrame row.\n",
    "    \"\"\"\n",
    "    return {\"F1 Score\": me.f1_score(y_true, y_pred),\n",
    "            \"Precision\": me.precision_score(y_true, y_pred),\n",
    "            \"Recall\": me.recall_score(y_true, y_pred),\n",
    "            \"Accuracy\": me.accuracy_score(y_true, y_pred)}\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:41:03.006176Z",
     "start_time": "2017-07-23T23:41:02.956649Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>Accuracy</th>\n",
       "      <th>F1 Score</th>\n",
       "      <th>Precision</th>\n",
       "      <th>Recall</th>\n",
       "      <th>Scenario</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>0.999157</td>\n",
       "      <td>0.99926</td>\n",
       "      <td>0.998522</td>\n",
       "      <td>1.0</td>\n",
       "      <td>Train+/Test+</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>0.999157</td>\n",
       "      <td>0.99926</td>\n",
       "      <td>0.998522</td>\n",
       "      <td>1.0</td>\n",
       "      <td>Train+/Test-</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "   Accuracy  F1 Score  Precision  Recall      Scenario\n",
       "0  0.999157   0.99926   0.998522     1.0  Train+/Test+\n",
       "1  0.999157   0.99926   0.998522     1.0  Train+/Test-"
      ]
     },
     "execution_count": 25,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Score each scenario once and assemble the comparison table in one pass.\n",
    "# DataFrame.append is deprecated (removed in pandas >= 2.0), so collect dict\n",
    "# rows and build the frame with a single constructor call instead.\n",
    "rows = []\n",
    "for scenario, frame in ((\"Train+/Test+\", df), (\"Train+/Test-\", df_)):\n",
    "    scores = get_score(frame.loc[:, 'Actual'].values.astype(int),\n",
    "                       frame.loc[:, 'Prediction'].values.astype(int))\n",
    "    scores[\"Scenario\"] = scenario\n",
    "    rows.append(scores)\n",
    "\n",
    "score_df = pd.DataFrame(rows)\n",
    "\n",
    "score_df"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:41:03.567393Z",
     "start_time": "2017-07-23T23:41:03.561256Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "Actual\n",
       "0.0     9711\n",
       "1.0    12833\n",
       "Name: Actual, dtype: int64"
      ]
     },
     "execution_count": 26,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Class balance check: row counts per Actual label.\n",
    "df.groupby(by=\"Actual\").Actual.count()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:41:04.924739Z",
     "start_time": "2017-07-23T23:41:04.633970Z"
    }
   },
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAbYAAAGkCAYAAABdFwDgAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3XmcTnX/x/HXZxb7HskaSmWXsW8thEooS9MmLXS371R3\nv+qulJa7ReutFNqQFlIUWpDstFjKSIpEUrasM5/fH9cxXbYxZi4z5ng/78f1cM73bJ9Lc8/H53u+\n53vM3REREQmLuNwOQEREJJaU2EREJFSU2EREJFSU2EREJFSU2EREJFSU2EREJFSU2EREJFSU2ERE\nJFSU2EREJFQScjsAERGJjfhix7rv3BKz8/mW3z929w4xO2EOUWITEQkJ37mF/Cf2iNn5ts5/rnTM\nTpaDlNhERELDwHSHSX8DIiISKqrYRETCwgCz3I4i16liExGRUFHFJiISJrrHpsQmIhIq6opUV6SI\niISLKjYRkdDQcH9QYhMRCRd1RaorUkREwkUVm4hIWBjqikSJTUQkRExdkagrUkREQkYVm4hImKgr\nUolNRCRU1BWprkgREQkXVWwiIqGhB7RBFZuIiISMKjYRkbDQ+9gAJTYRkXBRV6S6IkVEJGvM7BUz\nW2Nm30W1PWZmi83sGzN7z8xKRG2708xSzOx7M2sf1Z5kZt8G2waaRcpOM8tvZiOC9hlmViUzcSmx\niYiERjB4JFafAxsCdNijbQJQ293rAj8AdwKYWU0gGagVHPO8mcUHx7wA9AaqB59d57wC+NPdjwee\nBB7JTFBKbCIiYRJnsfscgLtPBtbt0faJu+8MVqcDFYPlzsBwd9/m7suAFKCxmZUDirn7dHd3YBjQ\nJeqYocHyKKDNrmouw7+CA0YuIiKSNZcD44LlCsAvUdtWBG0VguU923c7JkiW64GjDnRRDR4REQmL\n2M/uX9rMZketD3L3QZkKxezfwE7gjVgGlBlKbCIiYRLb4f5r3b3hwYdgvYCOQJugexFgJVApareK\nQdtK/umujG6PPmaFmSUAxYE/DnR9dUWKiEjMmFkHoC/Qyd3/jto0BkgORjpWJTJIZKa7rwI2mFnT\n4P5ZT2B01DGXBsvdgE+jEuV+qWITEQmNnJ1Sy8zeAk4l0mW5AriXyCjI/MCEYJzHdHf/l7svMLOR\nwEIiXZTXuntqcKpriIywLEjkntyu+3KDgdfMLIXIIJXkTMWVieQnIiJ5QFyxip6/yfUxO9/WiXfM\nyUpXZG5TV6SIiISKuiJFRMJEU2opsYmIhIaZJkFGXZEiIhIyqthERMJEXZGq2EREJFxUsYmIhInu\nsalik3Ays4Jm9oGZrTezt7NxnovM7JNYxpZbzKyVmX2f23HIoZTjr605LOXdyCUUzOxCM5ttZpvM\nbJWZjTOzljE4dTegLHCUu3fP6knc/Q13bxeDeA4pM3MzOz6jfdx9irufmFMxieQWJTbJNWZ2C/AU\n8BCRJFQZeA7oFIPTHwv8EPVeqCNaMIGsHAl2DfmPxSePUmKTXGFmxYH7icwX9667b3b3He4+1t37\nBvvkN7OnzOzX4POUmeUPtp1qZivM7Nbg1fSrzOyyYNt/gHuA84NK8Aozu8/MXo+6fpWgykkI1nuZ\n2Y9mttHMlpnZRVHtU6OOa25ms4Iuzllm1jxq2+dm9oCZfRmc5xMzK72f778r/r5R8Xcxs7PM7Acz\nW2dmd0Xt39jMvjKzv4J9nzWzfMG2ycFuXwff9/yo8/czs9+AV3e1BcccF1yjQbBe3sx+N7NTs/Uf\nVnLXrtfWqCtSJFc0AwoA72Wwz7+BpkB9oB7QGLg7avsxRF5jUYHIK+SfM7OS7n4vkSpwhLsXcffB\nGQViZoWBgcCZ7l4UaA7M38d+pYAPg32PAp4APjSz6BcfXghcBhwN5ANuy+DSxxD5O6hAJBG/BFwM\nJAGtgP8LZkEHSAVuBkoT+btrQ2TiWN
y9dbBPveD7jog6fyki1Wuf6Au7+1KgH/C6mRUCXgWGuvvn\nGcQrkicosUluOYrIu54y6iq8CLjf3de4++/Af4BLorbvCLbvcPePgE1AVu8hpQG1zaygu69y9wX7\n2OdsYIm7v+buO939LWAxcE7UPq+6+w/uvgUYSSQp788OoL+77wCGE0laT7v7xuD6C4kkdNx9jrtP\nD677E/A/4JRMfKd73X1bEM9u3P0lIAWYAZQj8g8JydM0eASU2CT3/EHkVRcZ3fspDyyPWl8etKWf\nY4/E+DdQ5GADcffNwPnAv4BVZvahmZ2UiXh2xVQhav23g4jnj6jXduxKPKujtm/ZdbyZnWBmY83s\nNzPbQKQi3Wc3Z5Tf3X3rAfZ5CagNPOPu2w6wr+QFusemxCa55itgG9Alg31+JdKNtkvloC0rNgOF\notaPid7o7h+7+xlEKpfFRH7hHyieXTGt3Me+sfYCkbiqu3sx4C4id1QykuE7qcysCJHBO4OB+4Ku\nVpE8T4lNcoW7rydyX+m5YNBEITNLNLMzzezRYLe3gLvNrEwwCOMe4PX9nfMA5gOtzaxyMHDlzl0b\nzKysmXUO7rVtI9KlmbaPc3wEnBA8opBgZucDNYGxWYzpYBQFNgCbgmry6j22rwaqHeQ5nwZmu/uV\nRO4dvpjtKCX3qStSiU1yj7v/F7iFyICQ34FfgOuA94NdHgRmA98A3wJzg7asXGsCMCI41xx2T0Zx\nQRy/EnlL7ynsnThw9z+AjsCtRLpS+wId3X1tVmI6SLcRGZiykUg1OWKP7fcBQ4NRkz0OdDIz6wx0\n4J/veQvQYNdoUMnD1BWpN2iLiIRFXIljPf+psRsDtHX0VXnyDdp6aFNEJCzM8nQXYqwosYmIhEke\n7kKMFaV2EREJFVVsIiIhYqrYVLGJiEi4HPEVW3zB4p5Q7OjcDkPysNoVi+d2CJLHzZ07Z627l8nu\neQxVbKDERkKxoyl3wZO5HYbkYV8+3jG3Q5A8rmCi7TlVW9YYB56P5gigrkgREQmVI75iExEJD1NX\nJEpsIiKhosSmrkgREQkZVWwiIiGiik2JTUQkVJTY1BUpIiIho4pNRCQs9BwboMQmIhIapuH+gLoi\nRUQkZFSxiYiEiCo2VWwiIhIyqthEREJEFZsSm4hIqCixqStSRERCRhWbiEhY6Dk2QIlNRCRU1BWp\nrkgREQkZVWwiIiGhmUcilNhEREJEiU1dkSIiEjKq2EREwkQFmxKbiEhomLoiQV2RIiISMqrYRERC\nRBWbEpuISKgosakrUkREQkYVm4hISOgB7QhVbCIikiVm9oqZrTGz76LaSpnZBDNbEvxZMmrbnWaW\nYmbfm1n7qPYkM/s22DbQguxsZvnNbETQPsPMqmQmLiU2EZEwsRh+DmwI0GGPtjuASe5eHZgUrGNm\nNYFkoFZwzPNmFh8c8wLQG6gefHad8wrgT3c/HngSeCQzQSmxiYiERfAcW6w+B+Luk4F1ezR3BoYG\ny0OBLlHtw919m7svA1KAxmZWDijm7tPd3YFhexyz61yjgDaWicCU2EREJJbKuvuqYPk3oGywXAH4\nJWq/FUFbhWB5z/bdjnH3ncB64KgDBaDBIyIiIRLjwSOlzWx21Pogdx+U2YPd3c3MYxlQZiixiYiE\nSIwT21p3b3iQx6w2s3LuviroZlwTtK8EKkXtVzFoWxks79kefcwKM0sAigN/HCgAdUWKiEgsjQEu\nDZYvBUZHtScHIx2rEhkkMjPottxgZk2D+2c99zhm17m6AZ8G9+EypIpNRCRMcvAxNjN7CziVSJfl\nCuBeYAAw0syuAJYDPQDcfYGZjQQWAjuBa909NTjVNURGWBYExgUfgMHAa2aWQmSQSnJm4lJiExEJ\nkZx8QNvdL9jPpjb72b8/0H8f7bOB2vto3wp0P9i41BUpIiKhoopNRCQkMvv8WdipYhORmLjqysup\nXP
5okurv3qP0zddfc0rLZjSsX4euXc5hw4YNuRThkSEnH9A+XCmxiUhMXHJpL0aPHb9X+9VXXcmD\nDw1g9vxv6dT5XJ7872O5EJ0cSZTYRCQmWrZqTalSpfZqT1nyAy1btQbg9LZn8P577+R0aEcUVWxK\nbCJyiNWoWYsPxkQeS3p31Nus+OWXAxwh2ZKzkyAflpTYROSQ+t9LrzDoxedp3jiJTZs2ki9fvtwO\nSUJOoyJF5JA68aSTGDvuEwCW/PAD4z76MJcjCre83IUYK6rYROSQWrMmMlVgWloaAx56kN59/pXL\nEUnYKbGJSEz0vPgCTm3VjB++/57jqlRkyCuDARg5/C3q1DyBerVPolz58vTsdVkuRxpiOfw+tsOV\nuiJFJCaGvf7WPtuvu+FGrrvhxhyO5shkQB7ORzGjik1EREJFFZuISGjk7S7EWFFiExEJEeU1JbYj\nzmWtq5LcrBKGMXz6z7zyxbL0bZe2qkLPllVITXM+XbiGAR8sIjHeeKhHXepUKo47/Oe9BUxP+YMC\niXE83yuJY0sXJjXNmbRgNY+MXZyL30z258Tjq1C0SFHi4+MBeOqZ52nWvPl+9y9doghr/9qUrWv2\nvrwXU6Z8QfFixYmLi+PJgc/RtFmzgzrH2A/GsGjRQm7vewdjRr9P9eonUKNmTQDuv+8eWrZqzelt\n2mYrTgknJbYjyAnHFCW5WSU6PzGVHanO0KsaM2nBapav/Ztmxx/FGbXLcuajk9memsZRRSIP0SY3\nqwxAh0cnc1SRfAy5qjGdnpgKwEuf/chXKX+QGG+8cU1TTq1Rhs8X/Z5r30/2b/zEzyhdunSOXvOh\nAY9xXtduTJzwCddfcxWz5n1zUMd3PKcTHc/pBMAHo9/nzLM7pie2e+67P+bxhoW6IjV45IhyfNki\nzF/+F1t3pJGa5sxYuo4OdcsBcFGLY3lh0lK2p6YB8Mem7QBUL1uUaUvWprdt2LKTupVKsHVHGl+l\n/AHAjlRnwYoNHFO8YC58K8mKTZs2cWa7NjRr1ICG9eukT3kVbdWqVbQ9rTVNkuqTVL82U6dOAWDi\nhE84pWUzmjVqwIXJ3dm0KePqrmWr1ixdmgLA1/Pn07pFUxqdXJce3c7lzz//BOC5ZwZyct2aNDq5\nLpdcFHlJ8mtDh3DTDdfx1bRpfDh2DHfdcTtNkurz49Kl9L68F+++M4pPPh7Phcn/vIdy8hefc17n\njlmKMxQs0hUZq09epcR2BPn+t400qlaKEoUSKZAYx2k1j6ZciQIAVDu6MI2rleL9m1sw4rpm1K1U\nHIBFv26gbe2yxMcZFUsVpE6l4unH7FKsYAJtah3Nl0EClMNPh7an0SSpPq2aNwGgQIECjBj1Hl/N\nmsv4iZ9xR99bcffdjhkx/E3OaNeeGXPmM3PO19SrV5+1a9cy4KEH+ejjiXw1ay4Nkhoy8KknMrz2\nh2M/oFbtOgBceVlP+j/8CLPmfUPt2nXo/8B/AHj8sQFMnzWPWfO+4ZnnXtzt+GbNm3N2x048NOAx\nZsyZT7XjjkvfdnqbtsyaOYPNmzcDMGrkCLr3SM5SnBIe6oo8gixdvYkXJy3ltaub8Pf2VBauXE9a\n8MssPs4oXiiRLk9+Sb3KJXiuVxKtHviUkTN+4fiyRfjg1pasXLeFOcv+TD9m13EDezZgyJSf+OWP\nv3Prq8kB7NkV6e7cc/ddfDllMnFxcfy6ciWrV6/mmGOOSd+nYcNGXNX7cnbs2ME5nbpQr359pkz+\ngsWLFnJ66xYAbN+xnSZN9n3v7K47bueRhx6kdJkyvDhoMOvXr+ev9X/RqvUpAFx8yaVcFFRbderU\npVfPi+jUqQvndO6S6e+VkJBAu3Yd+HDsB5zXtRvjxn1I/wGPHlScYWJAXFweLrViRIntCDNyxi+M\nnBGZXf32s09k1V9bAfjtr618/M1vAHz981+kuVOqcD7Wbd7OA+8v
TD/+nRub8+OazenrD59fh2W/\nb95tEIoc/oa/+QZr1/7OtJlzSExM5MTjq7Bt69bd9mnZqjUTPp3M+I8+pM8VvbjhplsoUbIkp7c9\nY78PY0fbdY9tl/Xr1+933/fGfMjUKZP5cOwHPDKgP7PnfZvp79L9/GReeP5ZSpUqRYOkhhQtWhR3\nz3ScYZOXuxBjRV2RR5hdg0LKlyhAh7rlGDN3JQCffPsbTasfBUDVMoVJjI9j3ebtFEiMo2C+yGi6\nlieUZmeak7I6cq/i1rNOpGiBRO5/b0EufBPJjvXr11OmzNEkJibyxeef8fPy5Xvts3z5csqWLcvl\nV/am1+VXMm/eXBo3acpX075kaUrkntnmzZtZ8sMPmbpm8eLFKVmiZPq9ujffeI2WrU8hLS2NFb/8\nwimnnkb/hx9h/fr1e90PK1K0KJs2btzneVu1PoX58+byyuCX6N4jcn8uO3FK3qeK7QjzwmVJlCyc\nj52pzv+N+pYNW3YCkUru0Qvq8XG/1uzY6dz65nwAShfNz9B/NcHd+e2vrdzyeqT9mOIFuL5ddVJW\nb+TD21oBMHTKT4yYrndt5QXJF15E1y7n0LB+HRokNeTEk07aa58pX3zOk088RmJCIoWLFGHwq8Mo\nU6YMLw0eQs+LL2D7tm0A3Hv/g1Q/4YRMXfelV4Zy/bX/Ysvff1OlWjUGvfwqqampXHbpxWxYvx7H\nuea6GyhRosRux3Xvkcy1V/fm+WcH8uaIUbtti4+P58yzOvL6sCG8/MpQgGzHmZdpVCTYnjeMjzT5\ny1b3chc8mdthSB62+PGOuR2C5HEFE22OuzfM9nnKneDHXfFcLEICYEH/djGJK6epYhMRCYs8Pkw/\nVpTYRERCIjK7vzKbEtth7P2bW5AvIY7ihfJRIDGO1esjo9b6DJ7NinVbYnadY0sX4ou7T+fut7/l\n9S8jgwj6d6/DrB/X8f6clTG7TvFCiXSsX443pv0MQLkSBfh355pcN3RuzK4hB9aqeRO2b9vGuj/X\nsXXLFsqXrwDAyHfe59gqVWJ+vfvuuZujjirN9TfexGU9L+bcrt3otMeQ/st6XsxXX31J8WKR5yeL\nFC3KpM+nxDwWOTIosR3Gujz5JQDdGlekTqUS3PvOd/vcL84gLZu3Sn/fsJXLT6nKW1/9TGp2T7Yf\nJQolclGLY9MT26q/tiqp5YIp02YAkZk95syZzVMDn83liCIeffzJvRJetJ07d5KQkLDf9cweF26a\n3R803D9Pio8zvnm4PfecW5NxfVtT/9gSfHVfG4oVjPyf9+RjS/D61ZEZJgrli+fxC+vx/s0t+fC2\nVrSpdfQ+z/n7xm3M+nEd5zWssNe2KqULM+xfjfng1paMuL4ZVcsUTm9//+YWjO/bmtvOOpFvHm4P\nQJH8Cbx5TVPG3tqKcX1bc3rNyDX7daxBtTJF+Oj2VvTreBLHli7ER7dHRlSOuaUl1Y4unH7Nt29o\nTs0KxTIdv2Tf4JcGcUff29LXB734Anf2u52lKSk0qFeLSy5Kpn6dGlx0QQ+2bIn0GMyeNYszTj+F\n5o2T6NzxTFavXh3TmO67526u6NWT01q3oPflvXh18Mt079qF9m1P45yz2pOWlkbf224hqX5tGtav\nw7vvREZMfjppIu3anMp5nTvS8OQ6MY3pcKcptZTY8qxiBROZsXQdZz46mbk//bXf/W5ofwJfLPqd\nLk9O5YLnpvPvzjXJn7Dv/+wvTFxKn9OP2+sH+uHz63D3299xzn+n8ujYxfyna20A/tO1FoM+/ZEO\nj05mzYZt6ftv3ZFKn8Gz6fjfKVz8/HT+79xaADwydhE//r6Jsx6bstebAMbO+5Wz65cHIo8SlCiU\nyMKVGw4qfsme7ucnM2b0e+zcGXkEZNjQV7m01+UALFq4kOuuv4n53y6iQP4CvDzof2zbto3bbrmR\nt0a+w7SZc0i+8GLuv/f/snz9
vrfdTJOk+jRJqs8VvXqmt3///WLGfTKJV4e9DsDX8+cx/O13GffJ\nJN4Z9TbfL17EzDlfM3b8BPredjNr1qwBYO6c2Tz1zPPM/3ZRlmOSvOmQ1edm5sAT7n5rsH4bUMTd\n7ztU19xHDEOAse4+6kD75jXbdqamzxSSkdYnlebUGmW4um1kfr38ifGUL1mQZb9v3mvfn9ZuZuHK\nDZxzcvn0tmIFEzj52JK8eHlSelt8MGVP/WNL0GvQTABGz13JbWefCET+pdfvnJNoWK0U7k75EgUo\nWTgxwzg/nL+Kl3s34plPltDx5PJ8NH/VQccv2VOsWDFatmzNx+PHUbVqNeLj4zmpRg2WpqRQpWpV\nmjRtCsAFF13M4JcH0fqUU1m0cAFnt4+8OiY1NZUKFStm+fr764o8p1NnChT4Z37Stm3bUbJkSQCm\nfTmVHudfQHx8PMcccwzNW7Rk7pzZ5MuXjyZNm1G5cuUsx5NXqSvy0N5j2wacZ2YPu/tBz45rZgnu\nvvMQxBUK23ak7baemubpP9D5E+PT2w2jz+DZ/JzJeRyfnbCEgZc0YN7yP9PPsG7zds56LPM38s9r\nVJGiBRLp+PgUUtOcr+5rQ/6E+AyPWfnnFv7etpPjyxah48nluS14QPxg45fs6XX5lQx8+gmOPbYK\nPS+9LL19z1+WZoa7U7tO3UM+yKNQocK7rxcuvJ899zguk/uFSh7vQoyVQ9mnsxMYBNy85wYzq2Jm\nn5rZN2Y2ycwqB+1DzOxFM5sBPGpm95nZUDObYmbLzew8M3vUzL41s/Fmlhgcd4+ZzTKz78xskB2B\n/2RZsW4LdSpGRpR1qPvPRLZfLP6dXq2rpK/XqlAsw/Ms+W0Ty9du5pQakXtZG7bsYM2GbbSvEzmn\nGdQoXxSIzCm5qz26yitaIJE/Nm0jNc1peUJpypWIvM5m89adFM6//39LjZ23imvaHk/+hLj0absO\nNn7JnuYtWrBs6VLefedtuvU4P739p2XLmD1rFgAj3nqT5s1bUqNmTX79dSWzZkaq9u3bt7NwQc5O\nr9aiZSveHjmctLQ0Vq9ezVfTvqRBUp57nlhi7FDfrHgOuMjMiu/R/gww1N3rAm8AA6O2VQSau/st\nwfpxwOlAJ+B14DN3rwNsAc4O9nnW3Ru5e22gIHDETQXx1PgfeKB7HUbf0pIdqf9Uc09//AMF8yUw\nvm9rPul3Cjd1OPCUQs9MWEKFkv+8W+36YXO5qEVlxt3emgl3nMLptcoCcN+7C/hXm+MZ17c1lY4q\nxIYtOwB4b/YKkqqWZHzf1pzToDw/rokkqbWbtvPtL+sZ37c1/TruPYXTh/N/pXNSBcbO/zVb8Uv2\nnNu1Gy1btqZ48X/+b3tSjRoMfPoJ6tepwd9b/uaK3n3Inz8/bw4fRb/bb6HRyXVp2uhkZs2ckeXr\nRt9ja5JUn9TU1AMec17Xbpxw4kk0alCXs9u35ZHHnuDoo4/cAUa7nmOL1SevOmRTapnZJncvYmb3\nAzuIJKIi7n6fma0Fyrn7jqDqWuXupYN7Yp+5+9DgHPcBO9y9v5nFBeco4O4enHeduz9lZl2BvkAh\noBTwjLsP2N89NjPrA/QBiC9aJqni5a8ckr+DsCuYL54t2yO/fLokVaB93WO4+tU5uRxVzgvblFqd\nzu7A7f3uTH+9zNKUFC48vxsz5szP5cjCK1ZTahWucKLXuPrFA++YSXP+73RNqbUfTwFzgVczuf+e\nowK2Abh7mpnt8H8ycRqQYGYFgOeBhu7+S5AMC5ABdx9EpJuU/GWrH9mTZWZDvcoluOfcmpgZG/7e\nwW1vfZ3bIUk2/PHHH5zSsikNkhqmJzWRvOiQJzZ3X2dmI4ErgF2l0TQgGXgNuAjIzt3nXUlsrZkV\nAboBoRsFeTianvLHQQ0qkcPbUUcdxXeLluzVftzxx6tay0PychdirOTU4/j/Ba6LWr8eeNXMbg
d+\nBy7b51GZ4O5/mdlLwHfAb8Cs7AQqIpKXKa8dwsTm7kWillcTuf+1a305kQEhex7Ta4/1+zI4531R\ny3cDdx/ofCIiEn5HygRqIiLhZ+qKBE2pJSIiIaOKTUQkJCLPseV2FLlPiU1EJDTy9oPVsaKuSBER\nCRVVbCIiIaKCTYlNRCRU1BWprkgREQkZVWwiImGh97EBSmwiIqGx67U1Rzp1RYqISKioYhMRCRFV\nbEpsIiKhorymrkgREQkZVWwiIiGirkhVbCIi4REM94/VJ1OXNLvZzBaY2Xdm9paZFTCzUmY2wcyW\nBH+WjNr/TjNLMbPvzax9VHuSmX0bbBto2cjQSmwiIpIlZlYBuAFo6O61gXggGbgDmOTu1YFJwTpm\nVjPYXgvoADxvZvHB6V4AegPVg0+HrMalxCYiEhIWzO4fq08mJQAFzSwBKAT8CnQGhgbbhwJdguXO\nwHB33+buy4AUoLGZlQOKuft0d3dgWNQxB02JTUREssTdVwKPAz8Dq4D17v4JUNbdVwW7/QaUDZYr\nAL9EnWJF0FYhWN6zPUuU2EREQiTG99hKm9nsqE+f3a9lJYlUYVWB8kBhM7s4ep+gAvOc+fYRGhUp\nIhIicbEdFbnW3RtmsL0tsMzdfwcws3eB5sBqMyvn7quCbsY1wf4rgUpRx1cM2lYGy3u2Z4kqNhER\nyaqfgaZmVigYxdgGWASMAS4N9rkUGB0sjwGSzSy/mVUlMkhkZtBtucHMmgbn6Rl1zEFTxSYiEiI5\n+Ribu88ws1HAXGAnMA8YBBQBRprZFcByoEew/wIzGwksDPa/1t1Tg9NdAwwBCgLjgk+WKLGJiIRE\n5N5Yzj6g7e73Avfu0byNSPW2r/37A/330T4bqB2LmNQVKSIioaKKTUQkROI0o5YSm4hImGiuSHVF\niohIyKhiExEJERVsSmwiIqFhROaLPNKpK1JEREJFFZuISIhoVKQqNhERCRlVbCIiYXFw71ELLSU2\nEZEQUV5TV6SIiISMKjYRkZAwYv4+tjxJiU1EJESU19QVKSIiIaOKTUQkRDQqUolNRCQ0Ii8aze0o\ncp+6IkVEJFRUsYmIhIhGRSqxiYiEitJaBonNzIpldKC7b4h9OCIiItmTUcW2AHB2/wfArnUHKh/C\nuEREJAs0KjKDxObulXIyEBERyZ7IzCO5HUXuy9SoSDNLNrO7guWKZpZ0aMMSERHJmgMmNjN7FjgN\nuCRo+ht48VAGJSIiWRC8tiZWn7wqM6Mim7t7AzObB+Du68ws3yGOS0REJEsyk9h2mFkckQEjmNlR\nQNohjUp0VfrzAAAgAElEQVRERLIkDxdaMZOZxPYc8A5Qxsz+A/QA/nNIoxIRkSzJy12IsXLAxObu\nw8xsDtA2aOru7t8d2rBERESyJrMzj8QDO4h0R2p+SRGRw5CG+0dkZlTkv4G3gPJAReBNM7vzUAcm\nIiIHT6MiM1ex9QROdve/AcysPzAPePhQBiYiIpIVmUlsq/bYLyFoExGRw0zerbNiJ6NJkJ8kck9t\nHbDAzD4O1tsBs3ImPBERySwzvbYGMq7Ydo18XAB8GNU+/dCFIyIikj0ZTYI8OCcDERGR7FPBlol7\nbGZ2HNAfqAkU2NXu7iccwrhERCQL8vJoxljJzDNpQ4BXidyTPBMYCYw4hDGJiIhkWWYSWyF3/xjA\n3Ze6+91EEpyIiBxmzGL3yasyM9x/WzAJ8lIz+xewEih6aMMSEZGDZZhGRZK5xHYzUBi4gci9tuLA\n5YcyKBERkazKzCTIM4LFjfzzslERETnc5PEuxFjJ6AHt9wjewbYv7n7eIYlIREQkGzKq2J7NsShy\nUe2Kxfny8Y65HYbkYSUbXZfbIYik03D/jB/QnpSTgYiISPbpvWL6OxARkZDJ7ItGRUTkMGeoKxIO\nIrGZWX5333YogxERkezRG7Qz9wbtxmb2LbAkWK9nZs8c8s
hERESyIDP32AYCHYE/ANz9a+C0QxmU\niIhkTZzF7pNXZaYrMs7dl+/Rb5t6iOIREZEsiszxmIczUoxkJrH9YmaNATezeOB64IdDG5aIiEjW\nZCaxXU2kO7IysBqYGLSJiMhhJi93IcbKAe+xufsad09299LBJ9nd1+ZEcCIicnBy+rU1ZlbCzEaZ\n2WIzW2RmzcyslJlNMLMlwZ8lo/a/08xSzOx7M2sf1Z5kZt8G2wZaNvpUM/MG7ZfYx5yR7t4nqxcV\nEZHQeBoY7+7dzCwfUAi4C5jk7gPM7A7gDqCfmdUEkoFaQHlgopmd4O6pwAtAb2AG8BHQARiXlYAy\n0xU5MWq5AHAu8EtWLiYiIoeOQY6+j83MigOtgV4A7r4d2G5mnYFTg92GAp8D/YDOwPDgmehlZpYC\nNDazn4Bi7j49OO8woAuHKrG5+4g9vshrwNSsXExERPKU0mY2O2p9kLsPilqvCvwOvGpm9YA5wI1A\nWXdfFezzG1A2WK4ATI86fkXQtiNY3rM9S7IypVZV/glSREQOIzGeAHituzfMYHsC0AC43t1nmNnT\nRLod07m7m9l+X4F2KGTmHtuf/HOPLQ5Yxx6Bi4jI4SGHH2NbAayIeiH1KCL5YbWZlXP3VWZWDlgT\nbF8JVIo6vmLQtjJY3rM9SzJM7sGolHpAmeBT0t2rufvIrF5QRETCwd1/I/Ks84lBUxtgITAGuDRo\nuxQYHSyPAZLNLL+ZVQWqAzODbssNZtY0yDs9o445aBlWbEEJ+ZG7187qBUREJGeYWY4OHglcD7wR\njIj8EbiMSNE00syuAJYDPQDcfYGZjSSS/HYC1wYjIgGuAYYABYkMGsnSwBHI3D22+WZ2srvPy+pF\nREQkZ+R0XnP3+cC+7sO12c/+/YH++2ifDcSkiNpvYjOzBHffCZwMzDKzpcBmIiNK3d0bxCIAERGR\nWMqoYptJZLRLpxyKRUREsklTamWc2AzA3ZfmUCwiIpINOf2A9uEqo8RWxsxu2d9Gd3/iEMQjIiKS\nLRkltnigCEHlJiIihz8VbBkntlXufn+ORSIiItmTx998HSsZPaCtvx4REclzMqrY9vkMgoiIHL5M\nNcn+E5u7r8vJQEREJHsioyJzO4rcF+OJoEVERHJXVl5bIyIihylVbKrYREQkZFSxiYiEiOlBNiU2\nEZGw0OCRCHVFiohIqKhiExEJC9OUWqDEJiISKprdX12RIiISMqrYRERCQoNHIpTYRERCRD2R6ooU\nEZGQUcUmIhIaRpxm91diExEJC0NdkaCuSNmHTz4eT91aJ1LrpON57NEBuR2OiMhBUWKT3aSmpnLT\nDdcy+oNxzPtmIW8Pf4tFCxfmdlgikhkWGRUZq09epcQmu5k1cybHHXc8VatVI1++fHQ/P5mxH4zO\n7bBEJJPizGL2yauU2GQ3v/66kooVK6WvV6hQkZUrV+ZiRCIiB0eDR0REQkKDRyJUscluypevwIoV\nv6Svr1y5ggoVKuRiRCIiB0eJTXbTsFEjUlKW8NOyZWzfvp23Rwzn7I6dcjssEckk3WNTV6TsISEh\ngSeffpZzzm5Pamoql/a6nJq1auV2WCKSSXk4H8WMEpvspcOZZ9HhzLNyOwwRkSxRYhMRCQlD95dA\niU1EJDwMTH2RSu4iIhIuqtgOQyceX4WiRYoSHx8PwFPPPE+z5s33u3/pEkVY+9embF2z9+W9mDRp\nAot++JH8+fOzdu1aWjRtyPcpP2XrvHsaM/p9qlc/gRo1awJw/3330LJVa05v0zam15HYefHeiziz\ndW1+X7eRht0fSm9/6KYunNW6Ntt3pLJsxVr63Ps66zdtISEhjhfuuYj6J1UiIT6ONz6cyeOvfALA\n6Gev4ZgyxUiIj+fLeUu56eERpKU5V3ZryVU9WpOalsbmv7dx7YNvsfjH33LrK+dpqtdUsR22xk/8\njBlz5jNjzvwMk1osxc
fHM/TVVw7pNT4Y/T6LFv0z9+Q9992vpHaYe+2D6XS+9rm92idNX0xS94do\nfP7DLFm+htsvbwdA17YNyJ8vgUY9HqL5RY9wZdcWVC5XCoCL+71Ck/MHkNStP2VKFqHrGQ0AGDFu\nNo16PETT5AE8MXQij9xyXs59wRCJvEFbw/2V2PKITZs2cWa7NjRr1ICG9evwwZi9529ctWoVbU9r\nTZOk+iTVr83UqVMAmDjhE05p2YxmjRpwYXJ3Nm3ad3V33fU38czAJ9m5c+de257472O0aNqIRifX\n5YH/3Jve/nD/B6hb60ROP6UlPS++gCefeByAV15+iRZNG9G4QT2Se3Tl77//5qtp0/hw7BjuuuN2\nmiTV58elS+l9eS/efWcUn3w8nguTu6efd/IXn3Ne544HFb8cGl/OXcq69X/v1T5p+mJSU9MAmPnt\nMiqULQGA4xQqkI/4+DgK5s/H9h2pbNy8FSD9z4SEOBIT4nH33doBChfMh+OH9DtJuCmxHaY6tD2N\nJkn1adW8CQAFChRgxKj3+GrWXMZP/Iw7+t6a/kthlxHD3+SMdu2ZMWc+M+d8Tb169Vm7di0DHnqQ\njz6eyFez5tIgqSEDn3pin9esVLkyzZu35M3XX9utfeKET1i6ZAlTv5rJjDnzmTd3DlOnTGb2rFm8\n/+47zJzzNaPHjmPunNnpx3Q+9zy+nD6LmXO/5qSTajDklcE0a96cszt24qEBjzFjznyqHXdc+v6n\nt2nLrJkz2Lx5MwCjRo6ge4/kg4pfck/Pzs34+MtIJf7uxHn8vXU7yyb054dx9/PUsEn8ueGfxDjm\nuWv5edIANv29jXcnzktvv6pHaxaMuZf+N3bh1kdH5fh3CAuL4Sev0j22w9T4iZ9RunTp9HV35567\n7+LLKZOJi4vj15UrWb16Ncccc0z6Pg0bNuKq3pezY8cOzunUhXr16zNl8hcsXrSQ01u3AGD7ju00\nadJsv9e9vd+ddO/amQ5nnZ3eNnHCJ0yc+AlNG54MwKbNm0hZsoSNGzfSsVNnChQoQIECBTjr7HPS\nj1m44Dvuu+du1v/1F5s2b+KMM9pn+H0TEhJo164DH479gPO6dmPcuA/pP+DRg45fcl7fK9qTmprG\n8I9mAdCoVhVSU9Oo1u7flCxaiImv3MynMxbz08o/AOh07XPkz5fAkId6cWqjE/l0xmIA/jdyMv8b\nOZnzOzTkjis70Pue1/Z7Tdm/PNyDGDNKbHnE8DffYO3a35k2cw6JiYmceHwVtm3duts+LVu1ZsKn\nkxn/0Yf0uaIXN9x0CyVKluT0tmcw7PW3MnWd46tXp269+rzz9sj0Nnfn9r53cmWfq3bb95mnn9rv\neXpf0YuRo96nbr16vDZ0CJO/+PyA1+5+fjIvPP8spUqVokFSQ4oWLYq7H1T8krMuPqcJZ7WuzZlX\nDUxv63FmQz6ZtpCdO9P4/c9NfDX/R5JqVk5PbADbtu/kg8+/4ZxT66Qntl1GfjyHp+86P8e+g4SP\nuiLziPXr11OmzNEkJibyxeef8fPy5Xvts3z5csqWLcvlV/am1+VXMm/eXBo3acpX075kaUoKAJs3\nb2bJDz9keK1+d/ybp558PH39jHbtGTrklfR7WytXrmTNmjU0a96Cj8Z+wNatW9m0aRPjPhqbfsym\njRs5plw5duzYwfC33khvL1K0KJs2btzndVu1PoX58+byyuCX6N4jGSBL8UvOOKN5DW7p1ZZuN/2P\nLVt3pLev+G0dpzY6EYBCBfLRuG4Vvv9pNYUL5uOY0sUAiI+P48yWtfj+p9UAHFe5TPrxZ7aqRcov\nv+fgNwkTwyx2n7xKFVsekXzhRXTtcg4N69ehQVJDTjzppL32mfLF5zz5xGMkJiRSuEgRBr86jDJl\nyvDS4CH0vPgCtm/bBsC99z9I9RNO2O+1ataqRf2TGzB/3lwA2p7RjsWLFnFqy0gXYOEi
RXh16Os0\nbNSIs8/pRKMGdTn66LLUql2H4sWKA3DPfQ/QukUTSpcuQ6PGTdKTWfceyVx7dW+ef3Ygb47Y/T5K\nfHw8Z57VkdeHDeHlV4YCZCl+ia2hD/eiVVJ1SpcoQsr4B3jgxY8Y+v5XPNmvB/nzJTD2hesAmPnt\nT9zQfzgvjpjMoP9czJxR/8YMXhs9ne+W/MrRpYoy6qmryJeYQFycMXn2El4aNRWAq89vzWlNTmLH\nzlT+2vA3vf9vWG5+ZcnjbM8BCEeapKSG/uWM2QfeUfZp06ZNFClShL///pszTmvNsy8M4uQGDXI7\nrBxVstF1uR2C5HFb5z83x90bZvc8x9Ws5w+98VEsQgIguUHFmMSV01SxSbZce3UfFi9cyNZtW7n4\nkkuPuKQmcrjJy12IsaLEJtky9LU3czsEEZHdKLHlQa2aN2H7tm2s+3MdW7dsoXz5yBuuR77zPsdW\nqRLz6913z90cdVRprr/xpr3ahw19lTKl/7nxP/HzKRQtWjTmMUj2TR52G/nyJVCqWCEKFEjk1zXr\nAehx8yB+XrUuZtepVqk0s0fexQ/L15AvMZ4vZi3h5gEjD3zgHsY8dy0X3v4yiQnxdG3XgJeD+3EV\ny5bg4ZvP5ZI7Xo1ZzGGiek2JLU+aMm0GAK8NHcKcObN5auCzuRbLzbfcvlfCi7Zz504SEhL2u74/\n7o67Exengbux0rpnZKTrxec0IalmZW5+5O197hcXZ6SlZe/e+w/L19A0eQAJCXF88tKNnH1KHT78\n4tuDOkenYBqvapVKc2W3lumJbcXqv5TU9kez+wMa7h8qg18axB19b0tfH/TiC9zZ73aWpqTQoF4t\nLrkomfp1anDRBT3YsmULALNnzeKM00+heeMkOnc8k9WrV2c7jlcHv0z3rl1o3/Y0zjmrPZ9Omki7\nNqdyXueONDy5DgD/ffxRkurXJql+bZ5/9hkAlqakcHLdmvS65CIa1KvFqlWrsh2LHFh8fByrJj/K\nY7d1ZeaIO2lUuwop4x+geJGCADSuU4UPX4wMkClcMB+D/nMxU167ja/e6sdZrWtneO6dO9OY8c1P\nHFepDGbGI7eex+y372LWyLs4t219AMqXKc6kV25m+vA7mP32XTStVxUgPYYHb+jMCccezfThd/DA\nDZ2oVqk004ffAcDUN/pS/dij06836ZWbqXtChYOOU8JFFVuIdD8/maaNTubBhwaQkJDAsKGvpg+b\nX7RwIS/8bzBNmjblil49eXnQ/+jzr6u57ZYbGfXeGEqXLs1bb77B/ff+H8+9OCjT13zyicd4fdgQ\nAI4qXZqPPp4IwNfz5zFj9nxKlizJp5MmMnfObOZ+s5DKlSszc8YMRrz5BlO/msXOnTtp1bwxrU85\nlYIFC/L94sW8/MowkhrmuYFYeVqJooWYOjeF2x9/J8P97upzJhOmLaLPva9TomhBJr92O5OmL2bb\n9r3nF4XIc2ynNDqBu58eTdczTubEqmVpfP7DlClZhKmv92XqnBQuOLsRH03+lv8OmUhcnFEwf+Ju\n57h74GiqVSpD0+QBQKSC2+Wdj+fQtV0DBrw0ngpHl6Bk8UJ888NK+t/Y+aDiDAu9aDQixxObmXUB\n3gNquPtiM6sCNHf3N4Pt9YHy7p6lMatm9hPQ0N3XxibivKNYsWK0bNmaj8ePo2rVasTHx3NSjRos\nTUmhStWqNGnaFIALLrqYwS8PovUpp7Jo4QLObh+ZXT81NZUKFSse1DX31xXZtm07SpYsmb7epGkz\nKleuDMC0aVPpcl5XChaMVATndOrCl1On0PaMdlQ77jgltVywbfsORn/69QH3a9OsBu1a1OLWy84A\noEC+BCodU4qUn9fstt+uCistzRnz2dd8OmMxT/Trzsjxc0hLc1b/sZFp85fSoFZlZi/4mWfvTiZ/\nvkQ++Pwbvv1hZabjfmfCXEY99S8GvDSebu0b8O6E
eQcVZxipKzJ3KrYLgKnBn/cCVYALgV3D6+oD\nDYHYPYxxBOl1+ZUMfPoJjj22Cj0vvSy9fc8fdjPD3aldpy6TPp8S8zgKFS6c4fr+FC6Uuf0ktrZs\n27Hb+s7UNOLiIj8z+fP9U0GZQY9bBrFsRcb/btx1jy0zvpj1A+2vfJoOrWrz8gOX8OSQiQwfl7ln\nS39e9Sebt2zjpGrH0K1dA3rf+/pBxSnhlKNVq5kVAVoCVwDJQfMAoJWZzTezfsD9wPnB+vlm1tjM\nvjKzeWY2zcxODM4Vb2aPm9l3ZvaNmV2/x7UKmtk4M+udg18x1zVv0YJlS5fy7jtv063HP/Pt/bRs\nGbNnRSapHfHWmzRv3pIaNWvy668rmTVzJgDbt29n4YIFhzzGFi1aMeb999iyZQubNm1i7AejadGy\n1SG/rmTe8l/XcXKNSIW9614YwMRpi7gm+ZT09XonZr7C/3JuCt3bJ2FmHF2qKM3qVWPugp+pXK4k\nv/2xgVfe/ZLXRk+n3kmVdjtu0+ZtFC2Uf7/nHfXxXG6/rB358iWkv5w0O3HmdZrdP+crts7AeHf/\nwcz+MLMk4A7gNnfvCGBmq4l0JV4XrBcDWrn7TjNrCzwEdAX6EKn26gfbSkVdpwgwHBjm7kfc3Dzn\ndu3G94sXU7x48fS2k2rUYODTT/DN1/OpVbsOV/TuQ/78+Xlz+ChuvfkGNm7YQGpaKjfedCs1a9XK\n9LWi77EBjHr/gwMe06hxY7onX0DLZo0A6N3namrXqZM+H6Tkvgdf/Ijn77mA9Ru3MHXuP/9d+v9v\nHI/d3pVZI+8iLs5Y+svv9Lg5c/dk3504n8Z1qzJr5J24Q78n3uX3PzfRs3NTbrj4dHbsTGXT39u4\n4u6hux23Zt1G5i36hVkj72L81O949b1pe5x3Ho/ceh73v/BhTOLM63KjJ9LM4oHZwEp37xj8Ph5B\n5Hf0T0APd/8z2PdOIsVNKnCDu38ctCcBQ4CCRHrsbvQsTo2Vo1NqmdlY4Gl3n2BmNwCVgbHsnth6\nsXtiqwQMBKoDDiS6+0lm9g7wortP2OMaPwHrgUfd/Q32wcz6EEmMVKpcOemHpXtPKJyXdTq7A7f3\nu5NWrSP/Yl2aksKF53djxpz5uRxZOGlKLcmuWE2pdXytev7f4R/HIiQAutQtl6m4zOwWIreQigWJ\n7VFgnbsPMLM7gJLu3s/MagJvAY2B8sBE4AR3TzWzmcANwAwiiW2gu4/LStw51hUZZPDTgZeD5HM7\n0IMDV7wPAJ+5e23gHKBAJi73JdDB9nMX1d0HuXtDd28Y/XBxXvfHH39Qu0Z1SpQsmZ7UROTIERkV\naTH7ZOqaZhWBs4GXo5o7A7tK76FAl6j24e6+zd2XASlAYzMrRyQpTg+qtGFRxxy0nLzH1g14zd2P\ndfcq7l4JWAakAdFTVWzcY704sGuYVK+o9gnAVWaWAOmJc5d7gD+B52L6DQ5zRx11FN8tWrLXu8uO\nO/54VWsiRwiz2H2A0mY2O+rTZx+XfAroS+R3+S5l3X3Xg6i/AWWD5QrAL1H7rQjaKgTLe7ZnSU4m\ntguIDPOP9g6RQSSpZva1md0MfAbU3DV4BHgUeNjM5rH7PcGXgZ+Bb8zsayIjK6PdCBQMSmIRETl4\na3f1bgWf3W5UmllHYI27z9nfCYIKLEdfI5Njg0fc/bR9tA3c175Aoz3Wo1++dXdw7E7gluATfc4q\nUauXISJyxDAsZ8cztgA6mdlZRG4TFTOz14HVZlbO3VcF3Yy7HiBcCUQPe60YtK0MlvdszxI9pC4i\nIlni7ne6e8WgoEgGPnX3i4ExwKXBbpcCo4PlMUCymeU3s6pEBgXODLotN5hZ02BsRM+oYw6aptQS\nEQmRw2TikQHASDO7AlhOZKAg7r7AzEYCC4GdwLXunhoccw3/DPcfF3yyRIlNRCQkdo2KzA3u/jnw\nebD8B9BmP/v1
B/rvo302EJPZqtUVKSIioaKKTUQkLOyw6YrMVUpsIiIhosSmrkgREQkZVWwiIiGS\nw8+xHZaU2EREQsKAOOU1dUWKiEi4qGITEQkRdUUqsYmIhIpGRaorUkREQkYVm4hIiKgrUolNRCQ0\nNCoyQl2RIiISKqrYRERCI8dfNHpYUsUmIiKhoopNRCQsNLs/oMQmIhIqymvqihQRkZBRxSYiEhKR\n4f6q2ZTYRERCRGlNXZEiIhIyqthERMJEJZsSm4hImOgBbXVFiohIyKhiExEJEQ2KVGITEQkV5TV1\nRYqISMioYhMRCROVbEpsIiJhYWhUJKgrUkREQkYVm4hIWOi1NYAqNhERCRlVbCIiIaKCTYlNRCRc\nlNnUFSkiIuGiik1EJDRMw/1RYhMRCRWNilRXpIiIhIwqNhGRkDA0dgSU2EREwkWZTV2RIiISLqrY\nRERCRKMildhEREJFoyLVFSkiIiGjik1EJERUsKliExGRkFHFJiISFnqQDVBiExEJFY2KVFekiIiE\njCo2EZGQMDTcH5TYRERCRXlNXZEiIhIySmwiImFiMfwc6FJmlczsMzNbaGYLzOzGoL2UmU0wsyXB\nnyWjjrnTzFLM7Hszax/VnmRm3wbbBpplvVNViU1EJEQshv/LhJ3Are5eE2gKXGtmNYE7gEnuXh2Y\nFKwTbEsGagEdgOfNLD441wtAb6B68OmQ1b8DJTYREckSd1/l7nOD5Y3AIqAC0BkYGuw2FOgSLHcG\nhrv7NndfBqQAjc2sHFDM3ae7uwPDoo45aBo8IiISIrk1KtLMqgAnAzOAsu6+Ktj0G1A2WK4ATI86\nbEXQtiNY3rM9S5TYRERCJMZ5rbSZzY5aH+Tug/a6plkR4B3gJnffEH17zN3dzDy2YWVMiU1ERPZn\nrbs3zGgHM0skktTecPd3g+bVZlbO3VcF3YxrgvaVQKWowysGbSuD5T3bs0T32EREwiRnR0UaMBhY\n5O5PRG0aA1waLF8KjI5qTzaz/GZWlcggkZlBt+UGM2sanLNn1DEHTRWbiEhIRPJRjt5kawFcAnxr\nZvODtruAAcBIM7sCWA70AHD3BWY2ElhIZETlte6eGhx3DTAEKAiMCz5ZosQmIiJZ4u5T2X9t12Y/\nx/QH+u+jfTZQOxZxKbGJiISFaa5I0D02EREJGVVsIiIhooJNiY25c+esLZhoy3M7jsNcaWBtbgch\neZp+hjJ2bMzOpMymxObuZXI7hsOdmc0+0LMsIhnRz5DkpCM+sYmIhEemJy8ONSU2EZEQ0ahIjYqU\nzNlrbjiRg6SfIckxqtjkgPY16anIwdDPUM7I5ExYoafEJiISJsps6ooUEZFwUcUmIhIiGhWpxCbZ\nZGY1gHLAFHffkdvxSN5hZubuOfoCyiOBRkUqsUn2JRN5cWCqmU1TcpPM2pXUzKwp8JO7/5bLIUlI\n6B6bZNd/gJ+A84GWwdt0RfbLzE42s3zB8nFEXmGyM3ejCo8cfM/oYUuJTQ5a8IZbANw9jcgvplUo\nuUnm3Ad8ECS3ZcB6YDuAmcWZWXwuxpa3Ba+tidUnr1Jik4MSfV/EzNqZ2alACeBB4Gciya25kpvs\nycziANy9M/AnMBIoQqTiLxRsSwPy5VKIEhK6xyYHJSqp3QKcS+QV772Bl939ITPrB/QBUoGpuRao\nHFaCfxClBctl3D3ZzEYDXxH5WSlnZqlAIrDKzO509y25GHIelodLrRhRYpODZmZtgdPcvZWZPQw0\nBi4wM9z9ETO7GUjJ3SjlcBL1D6IbgIZmdrW7dzazF4E2wKNAPJHq/3slNckOJTY5oH0My/4FuN7M\negGNgLOAJ4H7zCzR3Z/MhTDlMGdm5wKXAh3dfTOAu//LzN4GHgC6uLsGkWSDkbfvjcWK7rFJhva4\np9bEzEoCy9z9J6A68IK7rwK+Ab4G5udasHK4qwaMcfdVZpa46z7s/7d35zF2Vn
UYx7+PBYQuFAMB\nFNCWpeyLLQgFJQ2WTSkSAoSyVhqWEoigFtGiwUQDhmiElEVABKMCGllFUhYjIFIoVMpmyxqliFBQ\nlrKIwOMf5wy5jC1MyzB37nufT3PTufd95z1nJjPzu+e8v/M7tvcHngE+0dbeNUSyIjNii/fREtSO\nAaYDDwI3SLoMeAC4RNJYYF/KO/Fn29bZGDSWsvj6KeBzkla1/VI97wBgoe2pA97JaKwEtliiXiO1\nNYGtKPfStgV2BaYCMymp2tsD+9p+rE3djUGk18/OvsDLwGLgBuBg4AhJCyj302YAk9rV1ybKVGQC\nWyxBrz9MxwFrA5vbfh6YVdO2JwInAWfa/n37ehuDTa9EkYMoe7GdBBxLyZg9jvImaWVgsu0n2tTV\nRkqtyNxjiyXo9W77cOAuYF1Jl9fj1wO3UlKz81sU/0fSp4EvAROAdYFngQuB7W3PsH0QcJjt+9vX\ny2iqBLZ4R2tFEUnjKNNG59u+BtgQGCPpUgDbVwPfr6O46HKSVqvlsZC0FfAaMJkS3Ha1vTNwAXC5\npMn7U6MAAAedSURBVEMAbC9uV38bLdkjmYqMotf0437AppTqEBMk3WV7Xk0SeVzSxban9KRsR3eT\ntAIwBthL0seBNYCDbb9as2h/VU/9F/AjYHZ7etodOjge9ZsEtgDeNf24B+VeyO6U4HYIsLekt+u0\n0WhJo9vX0xhM6huiN2syyLeA8cBJtl+tp6wA7C5pY0qSyATbT7apu9ElMhUZ76h1H6cBc2z/1/Z9\nwNXAMOAgSZsD5GZ/ANTR2B716RhKzcezgbGSJgHYnglcQVnjuHeC2oerPwsgd3J2ZUZsXWwJa42e\noFTpX1/S1rbn2b69LqTdhbKINqLHisBOkr4DYHu8pDUomZCTJL1AKZP1BnBpT63I+HAlKzKBrWv1\nuqc2ibIf1gvA8cCZwP4904+2/yjpztTvCwBJa9v+p+1nJT0DbEYZlWH7OUnXUn6evgFsDXw+QS0G\nUqYiu5ykYymbhX4WuAg4sT5WA6ZI2gwgQS0AJG0C/EPSjyUdBJxHyXxcJOmc+obpCeBG4AhgB9sP\nt7HL3SdZkQls3UbSJyUNs+1aUeQASgbbDGBH4Bhgf8rmoUMo648ieiwG/kyZsp4KnAuMBGYBLwEz\nJR1KeXP0ku2n2tXR6F4JbF1E0lrA14BpkobXuo7PUXcvtv1v4ARgy1rYeLrt59rW4Rh0bC+kLNgf\nS8mcvRk4lFKd/1pgdWAKMNP2623qZlfLgC2BrdssAuZQqqh/uS7IfhS4rK5FAvgUpcrIEMp9kgjg\nXQv4TwZMWa/2NDAOuJ9yf3YhcLjth9rSyUhWJEke6QqSNgI+YnuBpF9SChfvCRxp+2RJ5wK3SrqP\nUtD4YNtvtbHLMQjV6eueP3ePAD+kBLUTbV9V7789U0f+EW2TwNZwklYHFgDPSfou8BalKO1IYENJ\nR9ueJml7SlHaH2SdWixNzaR9Q9IvgFuAs21fVY/Nb2vngpLs38FDrX6SwNZwtp+XNBG4iTL1vDVw\nOSUJ4A1gy/ou/Ge2/9O+nkYnqaP/k4FRkoa2VBqJNhKdPYXYXxLYuoDtP0jaHTiLEtjWoiy4PpCy\nfcjGwKVAAlssi9mUDWYjBpUEti5h+0ZJX6fser2D7UskXUOpHjHU9ovt7WF0GtvzJR2Y0VoMNgls\nXcT2dZLeBmZLGp8tZ+KDSlAbfDIVmcDWdWxfL2kl4CZJ41LqKCKaJoGtC9m+WtLNCWoRzZOsyAS2\nrpXdiyMaqMMXVveXVB6JiIhGyYgtIqIhOr3GY39JYIuIaJJEtkxFRueR9JakeyU9IOk3koZ+gGtN\nkPS7+vHetZrG0s5dre5ft6xtnFrXEPbp9V7nXCxpv2Voa5SkB5a1jxFNksAWneg129vY3oJSFuyY\n1oMqlvln2/Y1tk9/j1NWA5Y5sEUMJPXjv0
6VwBad7jZKMedRkhZI+jmlusp6knaTdIekuXVkNxxA\n0h6S5kuaS0tJKElTJM2sH68l6UpJ8+pjR+B0YIM6Wjyjnjdd0hxJ99Ui0z3XmiHpYUl/opQse0+S\njqzXmSfpt71GoRMl3V2vt1c9f4ikM1raPvqDfiMjmiKBLTpW3UNuT8peYAAbAefY3hx4BTgFmGh7\nLHA38FVJKwMXAJMoW66svZTLnwXcYntryqaaD1L2IXusjhanS9qttvkZYBtgnKSdJY2j1OHcBvgC\nsF0fvpwrbG9X2/srZXfqHqNqG18Ezqtfw1TgRdvb1esfKWl0H9qJhst+bEkeic60iqR768e3AT+l\nbJ76N9uz6+s7AJsBt9ctxFYC7gA2AZ6w/QhA3X7lqCW0sQtwGEDdm+5FSR/rdc5u9fGX+nw4JdCN\nAK7sKTdVa3K+ny0kfY8y3TkcmNVy7Nd1Mf0jkh6vX8NuwFYt999G1rYf7kNb0WAdHI/6TQJbdKLX\nbG/T+kINXq+0vgTcaHtyr/Pe9XkfkIDTbP+kVxsnLMe1Lgb2sT1P0hRgQssx9zrXte3jbbcGQCSN\nWo62IxolU5HRVLOBnSRtCCBpmKQxwHzKHmIb1PMmL+Xzbwam1c8dImkk8DJlNNZjFnBEy727dSSt\nCdwK7CNpFUkjKNOe72cE8LSkFYGDex3bX9JHap/Xp2wcOwuYVs9H0hhJw/rQTjSd+vHRl+bKPesF\nkh59r6zigZQRWzSS7UV15HOppI/Wl0+x/bCko4DrJL1KmcocsYRLfAU4X9JUyq7j02zfIen2mk5/\nfb3PtilwRx0xLgYOsT1X0uXAPOBZYE4fuvxt4E5gUf2/tU9/B+4CVgWOsf26pAsp997mqjS+CNin\nb9+daLKBzGaUNAQ4G9gVWAjMkXSN7YcGrBNL6lfZ6T0iIjrd2HHb+vbZd/fb9YaupHtsb7u045LG\nA6fa3r0+/yaA7dP6rRPLISO2iIiGEAOezbgO8GTL84XA9gPagyVIYIuIaIi5c++ZtcqKWqMfL7my\npNYh4Pm2z+/H638oEtgiIhrC9h4D3ORTwHotz9etr7VVsiIjImJ5zQE2kjRa0kqUwgR9Wbf5ocqI\nLSIilovtNyUdR1l+MgS4yPaDbe5WsiIjIqJZMhUZERGNksAWERGNksAWERGNksAWERGNksAWERGN\nksAWERGNksAWERGNksAWERGN8j+AYoG1DXJndgAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<matplotlib.figure.Figure at 0x7f1a4828dac8>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "plot(actual_value = df.loc[:,'Actual'].values.astype(int),\n",
    "     pred_value = df.loc[:,'Prediction'].values.astype(int))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:41:05.717403Z",
     "start_time": "2017-07-23T23:41:05.711124Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "Actual\n",
       "0.0     9711\n",
       "1.0    12833\n",
       "Name: Actual, dtype: int64"
      ]
     },
     "execution_count": 28,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "df_.groupby(by=\"Actual\").Actual.count()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:41:06.887660Z",
     "start_time": "2017-07-23T23:41:06.604894Z"
    }
   },
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAbYAAAGkCAYAAABdFwDgAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3XmcTnX/x/HXZxb7HskaSmWXsW8thEooS9MmLXS371R3\nv+qulJa7ReutFNqQFlIUWpDstFjKSIpEUrasM5/fH9cxXbYxZi4z5ng/78f1cM73bJ9Lc8/H53u+\n53vM3REREQmLuNwOQEREJJaU2EREJFSU2EREJFSU2EREJFSU2EREJFSU2EREJFSU2EREJFSU2ERE\nJFSU2EREJFQScjsAERGJjfhix7rv3BKz8/mW3z929w4xO2EOUWITEQkJ37mF/Cf2iNn5ts5/rnTM\nTpaDlNhERELDwHSHSX8DIiISKqrYRETCwgCz3I4i16liExGRUFHFJiISJrrHpsQmIhIq6opUV6SI\niISLKjYRkdDQcH9QYhMRCRd1RaorUkREwkUVm4hIWBjqikSJTUQkRExdkagrUkREQkYVm4hImKgr\nUolNRCRU1BWprkgREQkXVWwiIqGhB7RBFZuIiISMKjYRkbDQ+9gAJTYRkXBRV6S6IkVEJGvM7BUz\nW2Nm30W1PWZmi83sGzN7z8xKRG2708xSzOx7M2sf1Z5kZt8G2waaRcpOM8tvZiOC9hlmViUzcSmx\niYiERjB4JFafAxsCdNijbQJQ293rAj8AdwKYWU0gGagVHPO8mcUHx7wA9AaqB59d57wC+NPdjwee\nBB7JTFBKbCIiYRJnsfscgLtPBtbt0faJu+8MVqcDFYPlzsBwd9/m7suAFKCxmZUDirn7dHd3YBjQ\nJeqYocHyKKDNrmouw7+CA0YuIiKSNZcD44LlCsAvUdtWBG0VguU923c7JkiW64GjDnRRDR4REQmL\n2M/uX9rMZketD3L3QZkKxezfwE7gjVgGlBlKbCIiYRLb4f5r3b3hwYdgvYCOQJugexFgJVApareK\nQdtK/umujG6PPmaFmSUAxYE/DnR9dUWKiEjMmFkHoC/Qyd3/jto0BkgORjpWJTJIZKa7rwI2mFnT\n4P5ZT2B01DGXBsvdgE+jEuV+qWITEQmNnJ1Sy8zeAk4l0mW5AriXyCjI/MCEYJzHdHf/l7svMLOR\nwEIiXZTXuntqcKpriIywLEjkntyu+3KDgdfMLIXIIJXkTMWVieQnIiJ5QFyxip6/yfUxO9/WiXfM\nyUpXZG5TV6SIiISKuiJFRMJEU2opsYmIhIaZJkFGXZEiIhIyqthERMJEXZGq2EREJFxUsYmIhInu\nsalik3Ays4Jm9oGZrTezt7NxnovM7JNYxpZbzKyVmX2f23HIoZTjr605LOXdyCUUzOxCM5ttZpvM\nbJWZjTOzljE4dTegLHCUu3fP6knc/Q13bxeDeA4pM3MzOz6jfdx9irufmFMxieQWJTbJNWZ2C/AU\n8BCRJFQZeA7oFIPTHwv8EPVeqCNaMIGsHAl2DfmPxSePUmKTXGFmxYH7icwX9667b3b3He4+1t37\nBvvkN7OnzOzX4POUmeUPtp1qZivM7Nbg1fSrzOyyYNt/gHuA84NK8Aozu8/MXo+6fpWgykkI1nuZ\n2Y9mttHMlpnZRVHtU6OOa25ms4Iuzllm1jxq2+dm9oCZfRmc5xMzK72f778r/r5R8Xcxs7PM7Acz\nW2dmd0Xt39jMvjKzv4J9nzWzfMG2ycFuXwff9/yo8/czs9+AV3e1BcccF1yjQbBe3sx+N7NTs/Uf\nVnLXrtfWqCtSJFc0AwoA72Wwz7+BpkB9oB7QGLg7avsxRF5jUYHIK+SfM7OS7n4vkSpwhLsXcffB\nGQViZoWBgcCZ7l4UaA7M38d+pYAPg32PAp4APjSz6BcfXghcBhwN5ANuy+DSxxD5O6hAJBG/BFwM\nJAGtgP8LZkEHSAVuBkoT+btrQ2TiWN
y9dbBPveD7jog6fyki1Wuf6Au7+1KgH/C6mRUCXgWGuvvn\nGcQrkicosUluOYrIu54y6iq8CLjf3de4++/Af4BLorbvCLbvcPePgE1AVu8hpQG1zaygu69y9wX7\n2OdsYIm7v+buO939LWAxcE7UPq+6+w/uvgUYSSQp788OoL+77wCGE0laT7v7xuD6C4kkdNx9jrtP\nD677E/A/4JRMfKd73X1bEM9u3P0lIAWYAZQj8g8JydM0eASU2CT3/EHkVRcZ3fspDyyPWl8etKWf\nY4/E+DdQ5GADcffNwPnAv4BVZvahmZ2UiXh2xVQhav23g4jnj6jXduxKPKujtm/ZdbyZnWBmY83s\nNzPbQKQi3Wc3Z5Tf3X3rAfZ5CagNPOPu2w6wr+QFusemxCa55itgG9Alg31+JdKNtkvloC0rNgOF\notaPid7o7h+7+xlEKpfFRH7hHyieXTGt3Me+sfYCkbiqu3sx4C4id1QykuE7qcysCJHBO4OB+4Ku\nVpE8T4lNcoW7rydyX+m5YNBEITNLNLMzzezRYLe3gLvNrEwwCOMe4PX9nfMA5gOtzaxyMHDlzl0b\nzKysmXUO7rVtI9KlmbaPc3wEnBA8opBgZucDNYGxWYzpYBQFNgCbgmry6j22rwaqHeQ5nwZmu/uV\nRO4dvpjtKCX3qStSiU1yj7v/F7iFyICQ34FfgOuA94NdHgRmA98A3wJzg7asXGsCMCI41xx2T0Zx\nQRy/EnlL7ynsnThw9z+AjsCtRLpS+wId3X1tVmI6SLcRGZiykUg1OWKP7fcBQ4NRkz0OdDIz6wx0\n4J/veQvQYNdoUMnD1BWpN2iLiIRFXIljPf+psRsDtHX0VXnyDdp6aFNEJCzM8nQXYqwosYmIhEke\n7kKMFaV2EREJFVVsIiIhYqrYVLGJiEi4HPEVW3zB4p5Q7OjcDkPysNoVi+d2CJLHzZ07Z627l8nu\neQxVbKDERkKxoyl3wZO5HYbkYV8+3jG3Q5A8rmCi7TlVW9YYB56P5gigrkgREQmVI75iExEJD1NX\nJEpsIiKhosSmrkgREQkZVWwiIiGiik2JTUQkVJTY1BUpIiIho4pNRCQs9BwboMQmIhIapuH+gLoi\nRUQkZFSxiYiEiCo2VWwiIhIyqthEREJEFZsSm4hIqCixqStSRERCRhWbiEhY6Dk2QIlNRCRU1BWp\nrkgREQkZVWwiIiGhmUcilNhEREJEiU1dkSIiEjKq2EREwkQFmxKbiEhomLoiQV2RIiISMqrYRERC\nRBWbEpuISKgosakrUkREQkYVm4hISOgB7QhVbCIikiVm9oqZrTGz76LaSpnZBDNbEvxZMmrbnWaW\nYmbfm1n7qPYkM/s22DbQguxsZvnNbETQPsPMqmQmLiU2EZEwsRh+DmwI0GGPtjuASe5eHZgUrGNm\nNYFkoFZwzPNmFh8c8wLQG6gefHad8wrgT3c/HngSeCQzQSmxiYiERfAcW6w+B+Luk4F1ezR3BoYG\ny0OBLlHtw919m7svA1KAxmZWDijm7tPd3YFhexyz61yjgDaWicCU2EREJJbKuvuqYPk3oGywXAH4\nJWq/FUFbhWB5z/bdjnH3ncB64KgDBaDBIyIiIRLjwSOlzWx21Pogdx+U2YPd3c3MYxlQZiixiYiE\nSIwT21p3b3iQx6w2s3LuviroZlwTtK8EKkXtVzFoWxks79kefcwKM0sAigN/HCgAdUWKiEgsjQEu\nDZYvBUZHtScHIx2rEhkkMjPottxgZk2D+2c99zhm17m6AZ8G9+EypIpNRCRMcvAxNjN7CziVSJfl\nCuBeYAAw0syuAJYDPQDcfYGZjQQWAjuBa909NTjVNURGWBYExgUfgMHAa2aWQmSQSnJm4lJiExEJ\nkZx8QNvdL9jPpjb72b8/0H8f7bOB2vto3wp0P9i41BUpIiKhoopNRCQkMvv8WdipYhORmLjqysup\nXP
5okurv3qP0zddfc0rLZjSsX4euXc5hw4YNuRThkSEnH9A+XCmxiUhMXHJpL0aPHb9X+9VXXcmD\nDw1g9vxv6dT5XJ7872O5EJ0cSZTYRCQmWrZqTalSpfZqT1nyAy1btQbg9LZn8P577+R0aEcUVWxK\nbCJyiNWoWYsPxkQeS3p31Nus+OWXAxwh2ZKzkyAflpTYROSQ+t9LrzDoxedp3jiJTZs2ki9fvtwO\nSUJOoyJF5JA68aSTGDvuEwCW/PAD4z76MJcjCre83IUYK6rYROSQWrMmMlVgWloaAx56kN59/pXL\nEUnYKbGJSEz0vPgCTm3VjB++/57jqlRkyCuDARg5/C3q1DyBerVPolz58vTsdVkuRxpiOfw+tsOV\nuiJFJCaGvf7WPtuvu+FGrrvhxhyO5shkQB7ORzGjik1EREJFFZuISGjk7S7EWFFiExEJEeU1JbYj\nzmWtq5LcrBKGMXz6z7zyxbL0bZe2qkLPllVITXM+XbiGAR8sIjHeeKhHXepUKo47/Oe9BUxP+YMC\niXE83yuJY0sXJjXNmbRgNY+MXZyL30z258Tjq1C0SFHi4+MBeOqZ52nWvPl+9y9doghr/9qUrWv2\nvrwXU6Z8QfFixYmLi+PJgc/RtFmzgzrH2A/GsGjRQm7vewdjRr9P9eonUKNmTQDuv+8eWrZqzelt\n2mYrTgknJbYjyAnHFCW5WSU6PzGVHanO0KsaM2nBapav/Ztmxx/FGbXLcuajk9memsZRRSIP0SY3\nqwxAh0cnc1SRfAy5qjGdnpgKwEuf/chXKX+QGG+8cU1TTq1Rhs8X/Z5r30/2b/zEzyhdunSOXvOh\nAY9xXtduTJzwCddfcxWz5n1zUMd3PKcTHc/pBMAHo9/nzLM7pie2e+67P+bxhoW6IjV45IhyfNki\nzF/+F1t3pJGa5sxYuo4OdcsBcFGLY3lh0lK2p6YB8Mem7QBUL1uUaUvWprdt2LKTupVKsHVHGl+l\n/AHAjlRnwYoNHFO8YC58K8mKTZs2cWa7NjRr1ICG9eukT3kVbdWqVbQ9rTVNkuqTVL82U6dOAWDi\nhE84pWUzmjVqwIXJ3dm0KePqrmWr1ixdmgLA1/Pn07pFUxqdXJce3c7lzz//BOC5ZwZyct2aNDq5\nLpdcFHlJ8mtDh3DTDdfx1bRpfDh2DHfdcTtNkurz49Kl9L68F+++M4pPPh7Phcn/vIdy8hefc17n\njlmKMxQs0hUZq09epcR2BPn+t400qlaKEoUSKZAYx2k1j6ZciQIAVDu6MI2rleL9m1sw4rpm1K1U\nHIBFv26gbe2yxMcZFUsVpE6l4unH7FKsYAJtah3Nl0EClMNPh7an0SSpPq2aNwGgQIECjBj1Hl/N\nmsv4iZ9xR99bcffdjhkx/E3OaNeeGXPmM3PO19SrV5+1a9cy4KEH+ejjiXw1ay4Nkhoy8KknMrz2\nh2M/oFbtOgBceVlP+j/8CLPmfUPt2nXo/8B/AHj8sQFMnzWPWfO+4ZnnXtzt+GbNm3N2x048NOAx\nZsyZT7XjjkvfdnqbtsyaOYPNmzcDMGrkCLr3SM5SnBIe6oo8gixdvYkXJy3ltaub8Pf2VBauXE9a\n8MssPs4oXiiRLk9+Sb3KJXiuVxKtHviUkTN+4fiyRfjg1pasXLeFOcv+TD9m13EDezZgyJSf+OWP\nv3Prq8kB7NkV6e7cc/ddfDllMnFxcfy6ciWrV6/mmGOOSd+nYcNGXNX7cnbs2ME5nbpQr359pkz+\ngsWLFnJ66xYAbN+xnSZN9n3v7K47bueRhx6kdJkyvDhoMOvXr+ev9X/RqvUpAFx8yaVcFFRbderU\npVfPi+jUqQvndO6S6e+VkJBAu3Yd+HDsB5zXtRvjxn1I/wGPHlScYWJAXFweLrViRIntCDNyxi+M\nnBGZXf32s09k1V9bAfjtr618/M1vAHz981+kuVOqcD7Wbd7OA+8v
TD/+nRub8+OazenrD59fh2W/\nb95tEIoc/oa/+QZr1/7OtJlzSExM5MTjq7Bt69bd9mnZqjUTPp3M+I8+pM8VvbjhplsoUbIkp7c9\nY78PY0fbdY9tl/Xr1+933/fGfMjUKZP5cOwHPDKgP7PnfZvp79L9/GReeP5ZSpUqRYOkhhQtWhR3\nz3ScYZOXuxBjRV2RR5hdg0LKlyhAh7rlGDN3JQCffPsbTasfBUDVMoVJjI9j3ebtFEiMo2C+yGi6\nlieUZmeak7I6cq/i1rNOpGiBRO5/b0EufBPJjvXr11OmzNEkJibyxeef8fPy5Xvts3z5csqWLcvl\nV/am1+VXMm/eXBo3acpX075kaUrkntnmzZtZ8sMPmbpm8eLFKVmiZPq9ujffeI2WrU8hLS2NFb/8\nwimnnkb/hx9h/fr1e90PK1K0KJs2btzneVu1PoX58+byyuCX6N4jcn8uO3FK3qeK7QjzwmVJlCyc\nj52pzv+N+pYNW3YCkUru0Qvq8XG/1uzY6dz65nwAShfNz9B/NcHd+e2vrdzyeqT9mOIFuL5ddVJW\nb+TD21oBMHTKT4yYrndt5QXJF15E1y7n0LB+HRokNeTEk07aa58pX3zOk088RmJCIoWLFGHwq8Mo\nU6YMLw0eQs+LL2D7tm0A3Hv/g1Q/4YRMXfelV4Zy/bX/Ysvff1OlWjUGvfwqqampXHbpxWxYvx7H\nuea6GyhRosRux3Xvkcy1V/fm+WcH8uaIUbtti4+P58yzOvL6sCG8/MpQgGzHmZdpVCTYnjeMjzT5\ny1b3chc8mdthSB62+PGOuR2C5HEFE22OuzfM9nnKneDHXfFcLEICYEH/djGJK6epYhMRCYs8Pkw/\nVpTYRERCIjK7vzKbEtth7P2bW5AvIY7ihfJRIDGO1esjo9b6DJ7NinVbYnadY0sX4ou7T+fut7/l\n9S8jgwj6d6/DrB/X8f6clTG7TvFCiXSsX443pv0MQLkSBfh355pcN3RuzK4hB9aqeRO2b9vGuj/X\nsXXLFsqXrwDAyHfe59gqVWJ+vfvuuZujjirN9TfexGU9L+bcrt3otMeQ/st6XsxXX31J8WKR5yeL\nFC3KpM+nxDwWOTIosR3Gujz5JQDdGlekTqUS3PvOd/vcL84gLZu3Sn/fsJXLT6nKW1/9TGp2T7Yf\nJQolclGLY9MT26q/tiqp5YIp02YAkZk95syZzVMDn83liCIeffzJvRJetJ07d5KQkLDf9cweF26a\n3R803D9Pio8zvnm4PfecW5NxfVtT/9gSfHVfG4oVjPyf9+RjS/D61ZEZJgrli+fxC+vx/s0t+fC2\nVrSpdfQ+z/n7xm3M+nEd5zWssNe2KqULM+xfjfng1paMuL4ZVcsUTm9//+YWjO/bmtvOOpFvHm4P\nQJH8Cbx5TVPG3tqKcX1bc3rNyDX7daxBtTJF+Oj2VvTreBLHli7ER7dHRlSOuaUl1Y4unH7Nt29o\nTs0KxTIdv2Tf4JcGcUff29LXB734Anf2u52lKSk0qFeLSy5Kpn6dGlx0QQ+2bIn0GMyeNYszTj+F\n5o2T6NzxTFavXh3TmO67526u6NWT01q3oPflvXh18Mt079qF9m1P45yz2pOWlkbf224hqX5tGtav\nw7vvREZMfjppIu3anMp5nTvS8OQ6MY3pcKcptZTY8qxiBROZsXQdZz46mbk//bXf/W5ofwJfLPqd\nLk9O5YLnpvPvzjXJn7Dv/+wvTFxKn9OP2+sH+uHz63D3299xzn+n8ujYxfyna20A/tO1FoM+/ZEO\nj05mzYZt6ftv3ZFKn8Gz6fjfKVz8/HT+79xaADwydhE//r6Jsx6bstebAMbO+5Wz65cHIo8SlCiU\nyMKVGw4qfsme7ucnM2b0e+zcGXkEZNjQV7m01+UALFq4kOuuv4n53y6iQP4CvDzof2zbto3bbrmR\nt0a+w7SZc0i+8GLuv/f/snz9
vrfdTJOk+jRJqs8VvXqmt3///WLGfTKJV4e9DsDX8+cx/O13GffJ\nJN4Z9TbfL17EzDlfM3b8BPredjNr1qwBYO6c2Tz1zPPM/3ZRlmOSvOmQ1edm5sAT7n5rsH4bUMTd\n7ztU19xHDEOAse4+6kD75jXbdqamzxSSkdYnlebUGmW4um1kfr38ifGUL1mQZb9v3mvfn9ZuZuHK\nDZxzcvn0tmIFEzj52JK8eHlSelt8MGVP/WNL0GvQTABGz13JbWefCET+pdfvnJNoWK0U7k75EgUo\nWTgxwzg/nL+Kl3s34plPltDx5PJ8NH/VQccv2VOsWDFatmzNx+PHUbVqNeLj4zmpRg2WpqRQpWpV\nmjRtCsAFF13M4JcH0fqUU1m0cAFnt4+8OiY1NZUKFStm+fr764o8p1NnChT4Z37Stm3bUbJkSQCm\nfTmVHudfQHx8PMcccwzNW7Rk7pzZ5MuXjyZNm1G5cuUsx5NXqSvy0N5j2wacZ2YPu/tBz45rZgnu\nvvMQxBUK23ak7baemubpP9D5E+PT2w2jz+DZ/JzJeRyfnbCEgZc0YN7yP9PPsG7zds56LPM38s9r\nVJGiBRLp+PgUUtOcr+5rQ/6E+AyPWfnnFv7etpPjyxah48nluS14QPxg45fs6XX5lQx8+gmOPbYK\nPS+9LL19z1+WZoa7U7tO3UM+yKNQocK7rxcuvJ899zguk/uFSh7vQoyVQ9mnsxMYBNy85wYzq2Jm\nn5rZN2Y2ycwqB+1DzOxFM5sBPGpm95nZUDObYmbLzew8M3vUzL41s/Fmlhgcd4+ZzTKz78xskB2B\n/2RZsW4LdSpGRpR1qPvPRLZfLP6dXq2rpK/XqlAsw/Ms+W0Ty9du5pQakXtZG7bsYM2GbbSvEzmn\nGdQoXxSIzCm5qz26yitaIJE/Nm0jNc1peUJpypWIvM5m89adFM6//39LjZ23imvaHk/+hLj0absO\nNn7JnuYtWrBs6VLefedtuvU4P739p2XLmD1rFgAj3nqT5s1bUqNmTX79dSWzZkaq9u3bt7NwQc5O\nr9aiZSveHjmctLQ0Vq9ezVfTvqRBUp57nlhi7FDfrHgOuMjMiu/R/gww1N3rAm8AA6O2VQSau/st\nwfpxwOlAJ+B14DN3rwNsAc4O9nnW3Ru5e22gIHDETQXx1PgfeKB7HUbf0pIdqf9Uc09//AMF8yUw\nvm9rPul3Cjd1OPCUQs9MWEKFkv+8W+36YXO5qEVlxt3emgl3nMLptcoCcN+7C/hXm+MZ17c1lY4q\nxIYtOwB4b/YKkqqWZHzf1pzToDw/rokkqbWbtvPtL+sZ37c1/TruPYXTh/N/pXNSBcbO/zVb8Uv2\nnNu1Gy1btqZ48X/+b3tSjRoMfPoJ6tepwd9b/uaK3n3Inz8/bw4fRb/bb6HRyXVp2uhkZs2ckeXr\nRt9ja5JUn9TU1AMec17Xbpxw4kk0alCXs9u35ZHHnuDoo4/cAUa7nmOL1SevOmRTapnZJncvYmb3\nAzuIJKIi7n6fma0Fyrn7jqDqWuXupYN7Yp+5+9DgHPcBO9y9v5nFBeco4O4enHeduz9lZl2BvkAh\noBTwjLsP2N89NjPrA/QBiC9aJqni5a8ckr+DsCuYL54t2yO/fLokVaB93WO4+tU5uRxVzgvblFqd\nzu7A7f3uTH+9zNKUFC48vxsz5szP5cjCK1ZTahWucKLXuPrFA++YSXP+73RNqbUfTwFzgVczuf+e\nowK2Abh7mpnt8H8ycRqQYGYFgOeBhu7+S5AMC5ABdx9EpJuU/GWrH9mTZWZDvcoluOfcmpgZG/7e\nwW1vfZ3bIUk2/PHHH5zSsikNkhqmJzWRvOiQJzZ3X2dmI4ErgF2l0TQgGXgNuAjIzt3nXUlsrZkV\nAboBoRsFeTianvLHQQ0qkcPbUUcdxXeLluzVftzxx6tay0PychdirOTU4/j/Ba6LWr8eeNXMbg
d+\nBy7b51GZ4O5/mdlLwHfAb8Cs7AQqIpKXKa8dwsTm7kWillcTuf+1a305kQEhex7Ta4/1+zI4531R\ny3cDdx/ofCIiEn5HygRqIiLhZ+qKBE2pJSIiIaOKTUQkJCLPseV2FLlPiU1EJDTy9oPVsaKuSBER\nCRVVbCIiIaKCTYlNRCRU1BWprkgREQkZVWwiImGh97EBSmwiIqGx67U1Rzp1RYqISKioYhMRCRFV\nbEpsIiKhorymrkgREQkZVWwiIiGirkhVbCIi4REM94/VJ1OXNLvZzBaY2Xdm9paZFTCzUmY2wcyW\nBH+WjNr/TjNLMbPvzax9VHuSmX0bbBto2cjQSmwiIpIlZlYBuAFo6O61gXggGbgDmOTu1YFJwTpm\nVjPYXgvoADxvZvHB6V4AegPVg0+HrMalxCYiEhIWzO4fq08mJQAFzSwBKAT8CnQGhgbbhwJdguXO\nwHB33+buy4AUoLGZlQOKuft0d3dgWNQxB02JTUREssTdVwKPAz8Dq4D17v4JUNbdVwW7/QaUDZYr\nAL9EnWJF0FYhWN6zPUuU2EREQiTG99hKm9nsqE+f3a9lJYlUYVWB8kBhM7s4ep+gAvOc+fYRGhUp\nIhIicbEdFbnW3RtmsL0tsMzdfwcws3eB5sBqMyvn7quCbsY1wf4rgUpRx1cM2lYGy3u2Z4kqNhER\nyaqfgaZmVigYxdgGWASMAS4N9rkUGB0sjwGSzSy/mVUlMkhkZtBtucHMmgbn6Rl1zEFTxSYiEiI5\n+Ribu88ws1HAXGAnMA8YBBQBRprZFcByoEew/wIzGwksDPa/1t1Tg9NdAwwBCgLjgk+WKLGJiIRE\n5N5Yzj6g7e73Avfu0byNSPW2r/37A/330T4bqB2LmNQVKSIioaKKTUQkROI0o5YSm4hImGiuSHVF\niohIyKhiExEJERVsSmwiIqFhROaLPNKpK1JEREJFFZuISIhoVKQqNhERCRlVbCIiYXFw71ELLSU2\nEZEQUV5TV6SIiISMKjYRkZAwYv4+tjxJiU1EJESU19QVKSIiIaOKTUQkRDQqUolNRCQ0Ii8aze0o\ncp+6IkVEJFRUsYmIhIhGRSqxiYiEitJaBonNzIpldKC7b4h9OCIiItmTUcW2AHB2/wfArnUHKh/C\nuEREJAs0KjKDxObulXIyEBERyZ7IzCO5HUXuy9SoSDNLNrO7guWKZpZ0aMMSERHJmgMmNjN7FjgN\nuCRo+ht48VAGJSIiWRC8tiZWn7wqM6Mim7t7AzObB+Du68ws3yGOS0REJEsyk9h2mFkckQEjmNlR\nQNohjUp0VfrzAAAgAElEQVRERLIkDxdaMZOZxPYc8A5Qxsz+A/QA/nNIoxIRkSzJy12IsXLAxObu\nw8xsDtA2aOru7t8d2rBERESyJrMzj8QDO4h0R2p+SRGRw5CG+0dkZlTkv4G3gPJAReBNM7vzUAcm\nIiIHT6MiM1ex9QROdve/AcysPzAPePhQBiYiIpIVmUlsq/bYLyFoExGRw0zerbNiJ6NJkJ8kck9t\nHbDAzD4O1tsBs3ImPBERySwzvbYGMq7Ydo18XAB8GNU+/dCFIyIikj0ZTYI8OCcDERGR7FPBlol7\nbGZ2HNAfqAkU2NXu7iccwrhERCQL8vJoxljJzDNpQ4BXidyTPBMYCYw4hDGJiIhkWWYSWyF3/xjA\n3Ze6+91EEpyIiBxmzGL3yasyM9x/WzAJ8lIz+xewEih6aMMSEZGDZZhGRZK5xHYzUBi4gci9tuLA\n5YcyKBERkazKzCTIM4LFjfzzslERETnc5PEuxFjJ6AHt9wjewbYv7n7eIYlIREQkGzKq2J7NsShy\nUe2Kxfny8Y65HYbkYSUbXZfbIYik03D/jB/QnpSTgYiISPbpvWL6OxARkZDJ7ItGRUTkMGeoKxIO\nIrGZWX5333YogxERkezRG7Qz9wbtxmb2LbAkWK9nZs8c8s
hERESyIDP32AYCHYE/ANz9a+C0QxmU\niIhkTZzF7pNXZaYrMs7dl+/Rb5t6iOIREZEsiszxmIczUoxkJrH9YmaNATezeOB64IdDG5aIiEjW\nZCaxXU2kO7IysBqYGLSJiMhhJi93IcbKAe+xufsad09299LBJ9nd1+ZEcCIicnBy+rU1ZlbCzEaZ\n2WIzW2RmzcyslJlNMLMlwZ8lo/a/08xSzOx7M2sf1Z5kZt8G2wZaNvpUM/MG7ZfYx5yR7t4nqxcV\nEZHQeBoY7+7dzCwfUAi4C5jk7gPM7A7gDqCfmdUEkoFaQHlgopmd4O6pwAtAb2AG8BHQARiXlYAy\n0xU5MWq5AHAu8EtWLiYiIoeOQY6+j83MigOtgV4A7r4d2G5mnYFTg92GAp8D/YDOwPDgmehlZpYC\nNDazn4Bi7j49OO8woAuHKrG5+4g9vshrwNSsXExERPKU0mY2O2p9kLsPilqvCvwOvGpm9YA5wI1A\nWXdfFezzG1A2WK4ATI86fkXQtiNY3rM9S7IypVZV/glSREQOIzGeAHituzfMYHsC0AC43t1nmNnT\nRLod07m7m9l+X4F2KGTmHtuf/HOPLQ5Yxx6Bi4jI4SGHH2NbAayIeiH1KCL5YbWZlXP3VWZWDlgT\nbF8JVIo6vmLQtjJY3rM9SzJM7sGolHpAmeBT0t2rufvIrF5QRETCwd1/I/Ks84lBUxtgITAGuDRo\nuxQYHSyPAZLNLL+ZVQWqAzODbssNZtY0yDs9o445aBlWbEEJ+ZG7187qBUREJGeYWY4OHglcD7wR\njIj8EbiMSNE00syuAJYDPQDcfYGZjSSS/HYC1wYjIgGuAYYABYkMGsnSwBHI3D22+WZ2srvPy+pF\nREQkZ+R0XnP3+cC+7sO12c/+/YH++2ifDcSkiNpvYjOzBHffCZwMzDKzpcBmIiNK3d0bxCIAERGR\nWMqoYptJZLRLpxyKRUREsklTamWc2AzA3ZfmUCwiIpINOf2A9uEqo8RWxsxu2d9Gd3/iEMQjIiKS\nLRkltnigCEHlJiIihz8VbBkntlXufn+ORSIiItmTx998HSsZPaCtvx4REclzMqrY9vkMgoiIHL5M\nNcn+E5u7r8vJQEREJHsioyJzO4rcF+OJoEVERHJXVl5bIyIihylVbKrYREQkZFSxiYiEiOlBNiU2\nEZGw0OCRCHVFiohIqKhiExEJC9OUWqDEJiISKprdX12RIiISMqrYRERCQoNHIpTYRERCRD2R6ooU\nEZGQUcUmIhIaRpxm91diExEJC0NdkaCuSNmHTz4eT91aJ1LrpON57NEBuR2OiMhBUWKT3aSmpnLT\nDdcy+oNxzPtmIW8Pf4tFCxfmdlgikhkWGRUZq09epcQmu5k1cybHHXc8VatVI1++fHQ/P5mxH4zO\n7bBEJJPizGL2yauU2GQ3v/66kooVK6WvV6hQkZUrV+ZiRCIiB0eDR0REQkKDRyJUscluypevwIoV\nv6Svr1y5ggoVKuRiRCIiB0eJTXbTsFEjUlKW8NOyZWzfvp23Rwzn7I6dcjssEckk3WNTV6TsISEh\ngSeffpZzzm5Pamoql/a6nJq1auV2WCKSSXk4H8WMEpvspcOZZ9HhzLNyOwwRkSxRYhMRCQlD95dA\niU1EJDwMTH2RSu4iIhIuqtgOQyceX4WiRYoSHx8PwFPPPE+z5s33u3/pEkVY+9embF2z9+W9mDRp\nAot++JH8+fOzdu1aWjRtyPcpP2XrvHsaM/p9qlc/gRo1awJw/3330LJVa05v0zam15HYefHeiziz\ndW1+X7eRht0fSm9/6KYunNW6Ntt3pLJsxVr63Ps66zdtISEhjhfuuYj6J1UiIT6ONz6cyeOvfALA\n6Gev4ZgyxUiIj+fLeUu56eERpKU5V3ZryVU9WpOalsbmv7dx7YNvsfjH33LrK+dpqtdUsR22xk/8\njBlz5jNjzvwMk1osxc
fHM/TVVw7pNT4Y/T6LFv0z9+Q9992vpHaYe+2D6XS+9rm92idNX0xS94do\nfP7DLFm+htsvbwdA17YNyJ8vgUY9HqL5RY9wZdcWVC5XCoCL+71Ck/MHkNStP2VKFqHrGQ0AGDFu\nNo16PETT5AE8MXQij9xyXs59wRCJvEFbw/2V2PKITZs2cWa7NjRr1ICG9evwwZi9529ctWoVbU9r\nTZOk+iTVr83UqVMAmDjhE05p2YxmjRpwYXJ3Nm3ad3V33fU38czAJ9m5c+de257472O0aNqIRifX\n5YH/3Jve/nD/B6hb60ROP6UlPS++gCefeByAV15+iRZNG9G4QT2Se3Tl77//5qtp0/hw7BjuuuN2\nmiTV58elS+l9eS/efWcUn3w8nguTu6efd/IXn3Ne544HFb8cGl/OXcq69X/v1T5p+mJSU9MAmPnt\nMiqULQGA4xQqkI/4+DgK5s/H9h2pbNy8FSD9z4SEOBIT4nH33doBChfMh+OH9DtJuCmxHaY6tD2N\nJkn1adW8CQAFChRgxKj3+GrWXMZP/Iw7+t6a/kthlxHD3+SMdu2ZMWc+M+d8Tb169Vm7di0DHnqQ\njz6eyFez5tIgqSEDn3pin9esVLkyzZu35M3XX9utfeKET1i6ZAlTv5rJjDnzmTd3DlOnTGb2rFm8\n/+47zJzzNaPHjmPunNnpx3Q+9zy+nD6LmXO/5qSTajDklcE0a96cszt24qEBjzFjznyqHXdc+v6n\nt2nLrJkz2Lx5MwCjRo6ge4/kg4pfck/Pzs34+MtIJf7uxHn8vXU7yyb054dx9/PUsEn8ueGfxDjm\nuWv5edIANv29jXcnzktvv6pHaxaMuZf+N3bh1kdH5fh3CAuL4Sev0j22w9T4iZ9RunTp9HV35567\n7+LLKZOJi4vj15UrWb16Ncccc0z6Pg0bNuKq3pezY8cOzunUhXr16zNl8hcsXrSQ01u3AGD7ju00\nadJsv9e9vd+ddO/amQ5nnZ3eNnHCJ0yc+AlNG54MwKbNm0hZsoSNGzfSsVNnChQoQIECBTjr7HPS\nj1m44Dvuu+du1v/1F5s2b+KMM9pn+H0TEhJo164DH479gPO6dmPcuA/pP+DRg45fcl7fK9qTmprG\n8I9mAdCoVhVSU9Oo1u7flCxaiImv3MynMxbz08o/AOh07XPkz5fAkId6cWqjE/l0xmIA/jdyMv8b\nOZnzOzTkjis70Pue1/Z7Tdm/PNyDGDNKbHnE8DffYO3a35k2cw6JiYmceHwVtm3duts+LVu1ZsKn\nkxn/0Yf0uaIXN9x0CyVKluT0tmcw7PW3MnWd46tXp269+rzz9sj0Nnfn9r53cmWfq3bb95mnn9rv\neXpf0YuRo96nbr16vDZ0CJO/+PyA1+5+fjIvPP8spUqVokFSQ4oWLYq7H1T8krMuPqcJZ7WuzZlX\nDUxv63FmQz6ZtpCdO9P4/c9NfDX/R5JqVk5PbADbtu/kg8+/4ZxT66Qntl1GfjyHp+86P8e+g4SP\nuiLziPXr11OmzNEkJibyxeef8fPy5Xvts3z5csqWLcvlV/am1+VXMm/eXBo3acpX075kaUoKAJs3\nb2bJDz9keK1+d/ybp558PH39jHbtGTrklfR7WytXrmTNmjU0a96Cj8Z+wNatW9m0aRPjPhqbfsym\njRs5plw5duzYwfC33khvL1K0KJs2btzndVu1PoX58+byyuCX6N4jGSBL8UvOOKN5DW7p1ZZuN/2P\nLVt3pLev+G0dpzY6EYBCBfLRuG4Vvv9pNYUL5uOY0sUAiI+P48yWtfj+p9UAHFe5TPrxZ7aqRcov\nv+fgNwkTwyx2n7xKFVsekXzhRXTtcg4N69ehQVJDTjzppL32mfLF5zz5xGMkJiRSuEgRBr86jDJl\nyvDS4CH0vPgCtm/bBsC99z9I9RNO2O+1ataqRf2TGzB/3lwA2p7RjsWLFnFqy0gXYOEi
RXh16Os0\nbNSIs8/pRKMGdTn66LLUql2H4sWKA3DPfQ/QukUTSpcuQ6PGTdKTWfceyVx7dW+ef3Ygb47Y/T5K\nfHw8Z57VkdeHDeHlV4YCZCl+ia2hD/eiVVJ1SpcoQsr4B3jgxY8Y+v5XPNmvB/nzJTD2hesAmPnt\nT9zQfzgvjpjMoP9czJxR/8YMXhs9ne+W/MrRpYoy6qmryJeYQFycMXn2El4aNRWAq89vzWlNTmLH\nzlT+2vA3vf9vWG5+ZcnjbM8BCEeapKSG/uWM2QfeUfZp06ZNFClShL///pszTmvNsy8M4uQGDXI7\nrBxVstF1uR2C5HFb5z83x90bZvc8x9Ws5w+98VEsQgIguUHFmMSV01SxSbZce3UfFi9cyNZtW7n4\nkkuPuKQmcrjJy12IsaLEJtky9LU3czsEEZHdKLHlQa2aN2H7tm2s+3MdW7dsoXz5yBuuR77zPsdW\nqRLz6913z90cdVRprr/xpr3ahw19lTKl/7nxP/HzKRQtWjTmMUj2TR52G/nyJVCqWCEKFEjk1zXr\nAehx8yB+XrUuZtepVqk0s0fexQ/L15AvMZ4vZi3h5gEjD3zgHsY8dy0X3v4yiQnxdG3XgJeD+3EV\ny5bg4ZvP5ZI7Xo1ZzGGiek2JLU+aMm0GAK8NHcKcObN5auCzuRbLzbfcvlfCi7Zz504SEhL2u74/\n7o67Exengbux0rpnZKTrxec0IalmZW5+5O197hcXZ6SlZe/e+w/L19A0eQAJCXF88tKNnH1KHT78\n4tuDOkenYBqvapVKc2W3lumJbcXqv5TU9kez+wMa7h8qg18axB19b0tfH/TiC9zZ73aWpqTQoF4t\nLrkomfp1anDRBT3YsmULALNnzeKM00+heeMkOnc8k9WrV2c7jlcHv0z3rl1o3/Y0zjmrPZ9Omki7\nNqdyXueONDy5DgD/ffxRkurXJql+bZ5/9hkAlqakcHLdmvS65CIa1KvFqlWrsh2LHFh8fByrJj/K\nY7d1ZeaIO2lUuwop4x+geJGCADSuU4UPX4wMkClcMB+D/nMxU167ja/e6sdZrWtneO6dO9OY8c1P\nHFepDGbGI7eex+y372LWyLs4t219AMqXKc6kV25m+vA7mP32XTStVxUgPYYHb+jMCccezfThd/DA\nDZ2oVqk004ffAcDUN/pS/dij06836ZWbqXtChYOOU8JFFVuIdD8/maaNTubBhwaQkJDAsKGvpg+b\nX7RwIS/8bzBNmjblil49eXnQ/+jzr6u57ZYbGfXeGEqXLs1bb77B/ff+H8+9OCjT13zyicd4fdgQ\nAI4qXZqPPp4IwNfz5zFj9nxKlizJp5MmMnfObOZ+s5DKlSszc8YMRrz5BlO/msXOnTtp1bwxrU85\nlYIFC/L94sW8/MowkhrmuYFYeVqJooWYOjeF2x9/J8P97upzJhOmLaLPva9TomhBJr92O5OmL2bb\n9r3nF4XIc2ynNDqBu58eTdczTubEqmVpfP7DlClZhKmv92XqnBQuOLsRH03+lv8OmUhcnFEwf+Ju\n57h74GiqVSpD0+QBQKSC2+Wdj+fQtV0DBrw0ngpHl6Bk8UJ888NK+t/Y+aDiDAu9aDQixxObmXUB\n3gNquPtiM6sCNHf3N4Pt9YHy7p6lMatm9hPQ0N3XxibivKNYsWK0bNmaj8ePo2rVasTHx3NSjRos\nTUmhStWqNGnaFIALLrqYwS8PovUpp7Jo4QLObh+ZXT81NZUKFSse1DX31xXZtm07SpYsmb7epGkz\nKleuDMC0aVPpcl5XChaMVATndOrCl1On0PaMdlQ77jgltVywbfsORn/69QH3a9OsBu1a1OLWy84A\noEC+BCodU4qUn9fstt+uCistzRnz2dd8OmMxT/Trzsjxc0hLc1b/sZFp85fSoFZlZi/4mWfvTiZ/\nvkQ++Pwbvv1hZabjfmfCXEY99S8GvDSebu0b8O6E
eQcVZxipKzJ3KrYLgKnBn/cCVYALgV3D6+oD\nDYHYPYxxBOl1+ZUMfPoJjj22Cj0vvSy9fc8fdjPD3aldpy6TPp8S8zgKFS6c4fr+FC6Uuf0ktrZs\n27Hb+s7UNOLiIj8z+fP9U0GZQY9bBrFsRcb/btx1jy0zvpj1A+2vfJoOrWrz8gOX8OSQiQwfl7ln\nS39e9Sebt2zjpGrH0K1dA3rf+/pBxSnhlKNVq5kVAVoCVwDJQfMAoJWZzTezfsD9wPnB+vlm1tjM\nvjKzeWY2zcxODM4Vb2aPm9l3ZvaNmV2/x7UKmtk4M+udg18x1zVv0YJlS5fy7jtv063HP/Pt/bRs\nGbNnRSapHfHWmzRv3pIaNWvy668rmTVzJgDbt29n4YIFhzzGFi1aMeb999iyZQubNm1i7AejadGy\n1SG/rmTe8l/XcXKNSIW9614YwMRpi7gm+ZT09XonZr7C/3JuCt3bJ2FmHF2qKM3qVWPugp+pXK4k\nv/2xgVfe/ZLXRk+n3kmVdjtu0+ZtFC2Uf7/nHfXxXG6/rB358iWkv5w0O3HmdZrdP+crts7AeHf/\nwcz+MLMk4A7gNnfvCGBmq4l0JV4XrBcDWrn7TjNrCzwEdAX6EKn26gfbSkVdpwgwHBjm7kfc3Dzn\ndu3G94sXU7x48fS2k2rUYODTT/DN1/OpVbsOV/TuQ/78+Xlz+ChuvfkGNm7YQGpaKjfedCs1a9XK\n9LWi77EBjHr/gwMe06hxY7onX0DLZo0A6N3namrXqZM+H6Tkvgdf/Ijn77mA9Ru3MHXuP/9d+v9v\nHI/d3pVZI+8iLs5Y+svv9Lg5c/dk3504n8Z1qzJr5J24Q78n3uX3PzfRs3NTbrj4dHbsTGXT39u4\n4u6hux23Zt1G5i36hVkj72L81O949b1pe5x3Ho/ceh73v/BhTOLM63KjJ9LM4oHZwEp37xj8Ph5B\n5Hf0T0APd/8z2PdOIsVNKnCDu38ctCcBQ4CCRHrsbvQsTo2Vo1NqmdlY4Gl3n2BmNwCVgbHsnth6\nsXtiqwQMBKoDDiS6+0lm9g7wortP2OMaPwHrgUfd/Q32wcz6EEmMVKpcOemHpXtPKJyXdTq7A7f3\nu5NWrSP/Yl2aksKF53djxpz5uRxZOGlKLcmuWE2pdXytev7f4R/HIiQAutQtl6m4zOwWIreQigWJ\n7VFgnbsPMLM7gJLu3s/MagJvAY2B8sBE4AR3TzWzmcANwAwiiW2gu4/LStw51hUZZPDTgZeD5HM7\n0IMDV7wPAJ+5e23gHKBAJi73JdDB9nMX1d0HuXtDd28Y/XBxXvfHH39Qu0Z1SpQsmZ7UROTIERkV\naTH7ZOqaZhWBs4GXo5o7A7tK76FAl6j24e6+zd2XASlAYzMrRyQpTg+qtGFRxxy0nLzH1g14zd2P\ndfcq7l4JWAakAdFTVWzcY704sGuYVK+o9gnAVWaWAOmJc5d7gD+B52L6DQ5zRx11FN8tWrLXu8uO\nO/54VWsiRwiz2H2A0mY2O+rTZx+XfAroS+R3+S5l3X3Xg6i/AWWD5QrAL1H7rQjaKgTLe7ZnSU4m\ntguIDPOP9g6RQSSpZva1md0MfAbU3DV4BHgUeNjM5rH7PcGXgZ+Bb8zsayIjK6PdCBQMSmIRETl4\na3f1bgWf3W5UmllHYI27z9nfCYIKLEdfI5Njg0fc/bR9tA3c175Aoz3Wo1++dXdw7E7gluATfc4q\nUauXISJyxDAsZ8cztgA6mdlZRG4TFTOz14HVZlbO3VcF3Yy7HiBcCUQPe60YtK0MlvdszxI9pC4i\nIlni7ne6e8WgoEgGPnX3i4ExwKXBbpcCo4PlMUCymeU3s6pEBgXODLotN5hZ02BsRM+oYw6aptQS\nEQmRw2TikQHASDO7AlhOZKAg7r7AzEYCC4GdwLXunhoccw3/DPcfF3yyRIlNRCQkdo2KzA3u/jnw\nebD8B9BmP/v1
B/rvo302EJPZqtUVKSIioaKKTUQkLOyw6YrMVUpsIiIhosSmrkgREQkZVWwiIiGS\nw8+xHZaU2EREQsKAOOU1dUWKiEi4qGITEQkRdUUqsYmIhIpGRaorUkREQkYVm4hIiKgrUolNRCQ0\nNCoyQl2RIiISKqrYRERCI8dfNHpYUsUmIiKhoopNRCQsNLs/oMQmIhIqymvqihQRkZBRxSYiEhKR\n4f6q2ZTYRERCRGlNXZEiIhIyqthERMJEJZsSm4hImOgBbXVFiohIyKhiExEJEQ2KVGITEQkV5TV1\nRYqISMioYhMRCROVbEpsIiJhYWhUJKgrUkREQkYVm4hIWOi1NYAqNhERCRlVbCIiIaKCTYlNRCRc\nlNnUFSkiIuGiik1EJDRMw/1RYhMRCRWNilRXpIiIhIwqNhGRkDA0dgSU2EREwkWZTV2RIiISLqrY\nRERCRKMildhEREJFoyLVFSkiIiGjik1EJERUsKliExGRkFHFJiISFnqQDVBiExEJFY2KVFekiIiE\njCo2EZGQMDTcH5TYRERCRXlNXZEiIhIySmwiImFiMfwc6FJmlczsMzNbaGYLzOzGoL2UmU0wsyXB\nnyWjjrnTzFLM7Hszax/VnmRm3wbbBpplvVNViU1EJEQshv/LhJ3Are5eE2gKXGtmNYE7gEnuXh2Y\nFKwTbEsGagEdgOfNLD441wtAb6B68OmQ1b8DJTYREckSd1/l7nOD5Y3AIqAC0BkYGuw2FOgSLHcG\nhrv7NndfBqQAjc2sHFDM3ae7uwPDoo45aBo8IiISIrk1KtLMqgAnAzOAsu6+Ktj0G1A2WK4ATI86\nbEXQtiNY3rM9S5TYRERCJMZ5rbSZzY5aH+Tug/a6plkR4B3gJnffEH17zN3dzDy2YWVMiU1ERPZn\nrbs3zGgHM0skktTecPd3g+bVZlbO3VcF3YxrgvaVQKWowysGbSuD5T3bs0T32EREwiRnR0UaMBhY\n5O5PRG0aA1waLF8KjI5qTzaz/GZWlcggkZlBt+UGM2sanLNn1DEHTRWbiEhIRPJRjt5kawFcAnxr\nZvODtruAAcBIM7sCWA70AHD3BWY2ElhIZETlte6eGhx3DTAEKAiMCz5ZosQmIiJZ4u5T2X9t12Y/\nx/QH+u+jfTZQOxZxKbGJiISFaa5I0D02EREJGVVsIiIhooJNiY25c+esLZhoy3M7jsNcaWBtbgch\neZp+hjJ2bMzOpMymxObuZXI7hsOdmc0+0LMsIhnRz5DkpCM+sYmIhEemJy8ONSU2EZEQ0ahIjYqU\nzNlrbjiRg6SfIckxqtjkgPY16anIwdDPUM7I5ExYoafEJiISJsps6ooUEZFwUcUmIhIiGhWpxCbZ\nZGY1gHLAFHffkdvxSN5hZubuOfoCyiOBRkUqsUn2JRN5cWCqmU1TcpPM2pXUzKwp8JO7/5bLIUlI\n6B6bZNd/gJ+A84GWwdt0RfbLzE42s3zB8nFEXmGyM3ejCo8cfM/oYUuJTQ5a8IZbANw9jcgvplUo\nuUnm3Ad8ECS3ZcB6YDuAmcWZWXwuxpa3Ba+tidUnr1Jik4MSfV/EzNqZ2alACeBB4Gciya25kpvs\nycziANy9M/AnMBIoQqTiLxRsSwPy5VKIEhK6xyYHJSqp3QKcS+QV772Bl939ITPrB/QBUoGpuRao\nHFaCfxClBctl3D3ZzEYDXxH5WSlnZqlAIrDKzO509y25GHIelodLrRhRYpODZmZtgdPcvZWZPQw0\nBi4wM9z9ETO7GUjJ3SjlcBL1D6IbgIZmdrW7dzazF4E2wKNAPJHq/3slNckOJTY5oH0My/4FuN7M\negGNgLOAJ4H7zCzR3Z/MhTDlMGdm5wKXAh3dfTOAu//LzN4GHgC6uLsGkWSDkbfvjcWK7rFJhva4\np9bEzEoCy9z9J6A68IK7rwK+Ab4G5udasHK4qwaMcfdVZpa46z7s/7d35zF2Vn
UYx7+PBYQuFAMB\nFNCWpeyLLQgFJQ2WTSkSAoSyVhqWEoigFtGiwUQDhmiElEVABKMCGllFUhYjIFIoVMpmyxqliFBQ\nlrKIwOMf5wy5jC1MyzB37nufT3PTufd95z1nJjPzu+e8v/M7tvcHngE+0dbeNUSyIjNii/fREtSO\nAaYDDwI3SLoMeAC4RNJYYF/KO/Fn29bZGDSWsvj6KeBzkla1/VI97wBgoe2pA97JaKwEtliiXiO1\nNYGtKPfStgV2BaYCMymp2tsD+9p+rE3djUGk18/OvsDLwGLgBuBg4AhJCyj302YAk9rV1ybKVGQC\nWyxBrz9MxwFrA5vbfh6YVdO2JwInAWfa/n37ehuDTa9EkYMoe7GdBBxLyZg9jvImaWVgsu0n2tTV\nRkqtyNxjiyXo9W77cOAuYF1Jl9fj1wO3UlKz81sU/0fSp4EvAROAdYFngQuB7W3PsH0QcJjt+9vX\ny2iqBLZ4R2tFEUnjKNNG59u+BtgQGCPpUgDbVwPfr6O46HKSVqvlsZC0FfAaMJkS3Ha1vTNwAXC5\npMn7U6MAAAedSURBVEMAbC9uV38bLdkjmYqMotf0437AppTqEBMk3WV7Xk0SeVzSxban9KRsR3eT\ntAIwBthL0seBNYCDbb9as2h/VU/9F/AjYHZ7etodOjge9ZsEtgDeNf24B+VeyO6U4HYIsLekt+u0\n0WhJo9vX0xhM6huiN2syyLeA8cBJtl+tp6wA7C5pY0qSyATbT7apu9ElMhUZ76h1H6cBc2z/1/Z9\nwNXAMOAgSZsD5GZ/ANTR2B716RhKzcezgbGSJgHYnglcQVnjuHeC2oerPwsgd3J2ZUZsXWwJa42e\noFTpX1/S1rbn2b69LqTdhbKINqLHisBOkr4DYHu8pDUomZCTJL1AKZP1BnBpT63I+HAlKzKBrWv1\nuqc2ibIf1gvA8cCZwP4904+2/yjpztTvCwBJa9v+p+1nJT0DbEYZlWH7OUnXUn6evgFsDXw+QS0G\nUqYiu5ykYymbhX4WuAg4sT5WA6ZI2gwgQS0AJG0C/EPSjyUdBJxHyXxcJOmc+obpCeBG4AhgB9sP\nt7HL3SdZkQls3UbSJyUNs+1aUeQASgbbDGBH4Bhgf8rmoUMo648ieiwG/kyZsp4KnAuMBGYBLwEz\nJR1KeXP0ku2n2tXR6F4JbF1E0lrA14BpkobXuo7PUXcvtv1v4ARgy1rYeLrt59rW4Rh0bC+kLNgf\nS8mcvRk4lFKd/1pgdWAKMNP2623qZlfLgC2BrdssAuZQqqh/uS7IfhS4rK5FAvgUpcrIEMp9kgjg\nXQv4TwZMWa/2NDAOuJ9yf3YhcLjth9rSyUhWJEke6QqSNgI+YnuBpF9SChfvCRxp+2RJ5wK3SrqP\nUtD4YNtvtbHLMQjV6eueP3ePAD+kBLUTbV9V7789U0f+EW2TwNZwklYHFgDPSfou8BalKO1IYENJ\nR9ueJml7SlHaH2SdWixNzaR9Q9IvgFuAs21fVY/Nb2vngpLs38FDrX6SwNZwtp+XNBG4iTL1vDVw\nOSUJ4A1gy/ou/Ge2/9O+nkYnqaP/k4FRkoa2VBqJNhKdPYXYXxLYuoDtP0jaHTiLEtjWoiy4PpCy\nfcjGwKVAAlssi9mUDWYjBpUEti5h+0ZJX6fser2D7UskXUOpHjHU9ovt7WF0GtvzJR2Y0VoMNgls\nXcT2dZLeBmZLGp8tZ+KDSlAbfDIVmcDWdWxfL2kl4CZJ41LqKCKaJoGtC9m+WtLNCWoRzZOsyAS2\nrpXdiyMaqMMXVveXVB6JiIhGyYgtIqIhOr3GY39JYIuIaJJEtkxFRueR9JakeyU9IOk3koZ+gGtN\nkPS7+vHetZrG0s5dre5ft6xtnFrXEPbp9V7nXCxpv2Voa5SkB5a1jxFNksAWneg129vY3oJSFuyY\n1oMqlvln2/Y1tk9/j1NWA5Y5sEUMJPXjv0
6VwBad7jZKMedRkhZI+jmlusp6knaTdIekuXVkNxxA\n0h6S5kuaS0tJKElTJM2sH68l6UpJ8+pjR+B0YIM6Wjyjnjdd0hxJ99Ui0z3XmiHpYUl/opQse0+S\njqzXmSfpt71GoRMl3V2vt1c9f4ikM1raPvqDfiMjmiKBLTpW3UNuT8peYAAbAefY3hx4BTgFmGh7\nLHA38FVJKwMXAJMoW66svZTLnwXcYntryqaaD1L2IXusjhanS9qttvkZYBtgnKSdJY2j1OHcBvgC\nsF0fvpwrbG9X2/srZXfqHqNqG18Ezqtfw1TgRdvb1esfKWl0H9qJhst+bEkeic60iqR768e3AT+l\nbJ76N9uz6+s7AJsBt9ctxFYC7gA2AZ6w/QhA3X7lqCW0sQtwGEDdm+5FSR/rdc5u9fGX+nw4JdCN\nAK7sKTdVa3K+ny0kfY8y3TkcmNVy7Nd1Mf0jkh6vX8NuwFYt999G1rYf7kNb0WAdHI/6TQJbdKLX\nbG/T+kINXq+0vgTcaHtyr/Pe9XkfkIDTbP+kVxsnLMe1Lgb2sT1P0hRgQssx9zrXte3jbbcGQCSN\nWo62IxolU5HRVLOBnSRtCCBpmKQxwHzKHmIb1PMmL+Xzbwam1c8dImkk8DJlNNZjFnBEy727dSSt\nCdwK7CNpFUkjKNOe72cE8LSkFYGDex3bX9JHap/Xp2wcOwuYVs9H0hhJw/rQTjSd+vHRl+bKPesF\nkh59r6zigZQRWzSS7UV15HOppI/Wl0+x/bCko4DrJL1KmcocsYRLfAU4X9JUyq7j02zfIen2mk5/\nfb3PtilwRx0xLgYOsT1X0uXAPOBZYE4fuvxt4E5gUf2/tU9/B+4CVgWOsf26pAsp997mqjS+CNin\nb9+daLKBzGaUNAQ4G9gVWAjMkXSN7YcGrBNL6lfZ6T0iIjrd2HHb+vbZd/fb9YaupHtsb7u045LG\nA6fa3r0+/yaA7dP6rRPLISO2iIiGEAOezbgO8GTL84XA9gPagyVIYIuIaIi5c++ZtcqKWqMfL7my\npNYh4Pm2z+/H638oEtgiIhrC9h4D3ORTwHotz9etr7VVsiIjImJ5zQE2kjRa0kqUwgR9Wbf5ocqI\nLSIilovtNyUdR1l+MgS4yPaDbe5WsiIjIqJZMhUZERGNksAWERGNksAWERGNksAWERGNksAWERGN\nksAWERGNksAWERGNksAWERGN8j+AYoG1DXJndgAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<matplotlib.figure.Figure at 0x7f1a4832f978>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "plot(actual_value=df_['Actual'].values.astype(int),\n",
    "     pred_value=df_['Prediction'].values.astype(int))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2017-07-23T23:41:07.567012Z",
     "start_time": "2017-07-23T23:41:07.553256Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "no_of_features  hidden_layers\n",
       "1               1                (0.896582997095, 0.920316318303)\n",
       "                3                 (0.979369982016, 1.00791284291)\n",
       "dtype: object"
      ]
     },
     "execution_count": 30,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from scipy import stats\n",
    "\n",
    "def fn(x):\n",
    "    \"\"\"Return a 95% normal confidence interval for the f1_score values in group x.\n",
    "\n",
    "    x is a group produced by the upstream groupby (psg); its f1_score column's\n",
    "    mean and std parameterize the normal interval.\n",
    "    \"\"\"\n",
    "    return stats.norm.interval(0.95, loc=x.f1_score.mean(), scale=x.f1_score.std())\n",
    "psg.apply(fn)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "_draft": {
   "nbviewer_url": "https://gist.github.com/7d1ace18a82178e15ece8fc5252fce88"
  },
  "anaconda-cloud": {},
  "gist": {
   "data": {
    "description": "Hyper parameter tuning",
    "public": false
   },
   "id": "7d1ace18a82178e15ece8fc5252fce88"
  },
  "kernelspec": {
   "display_name": "Python [conda env:p3]",
   "language": "python",
   "name": "conda-env-p3-py"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.1"
  },
  "varInspector": {
   "cols": {
    "lenName": 16,
    "lenType": 16,
    "lenVar": 40
   },
   "kernels_config": {
    "python": {
     "delete_cmd_postfix": "",
     "delete_cmd_prefix": "del ",
     "library": "var_list.py",
     "varRefreshCmd": "print(var_dic_list())"
    },
    "r": {
     "delete_cmd_postfix": ") ",
     "delete_cmd_prefix": "rm(",
     "library": "var_list.r",
     "varRefreshCmd": "cat(var_dic_list()) "
    }
   },
   "types_to_exclude": [
    "module",
    "function",
    "builtin_function_or_method",
    "instance",
    "_Feature"
   ],
   "window_display": false
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
