{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Import libraries\n",
    "import pandas as pd\n",
    "import numpy as np"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>id</th>\n",
       "      <th>a</th>\n",
       "      <th>b</th>\n",
       "      <th>c</th>\n",
       "      <th>t</th>\n",
       "      <th>distance</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>0</td>\n",
       "      <td>5.343934</td>\n",
       "      <td>-3.793345</td>\n",
       "      <td>-5.849364</td>\n",
       "      <td>0</td>\n",
       "      <td>8.784200</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>1</td>\n",
       "      <td>-2.078382</td>\n",
       "      <td>0.795553</td>\n",
       "      <td>-3.898876</td>\n",
       "      <td>0</td>\n",
       "      <td>4.489300</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>2</td>\n",
       "      <td>-6.923646</td>\n",
       "      <td>1.368068</td>\n",
       "      <td>-3.871322</td>\n",
       "      <td>0</td>\n",
       "      <td>8.049572</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>3</td>\n",
       "      <td>-13.794147</td>\n",
       "      <td>4.336902</td>\n",
       "      <td>16.739670</td>\n",
       "      <td>1</td>\n",
       "      <td>22.120211</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>4</td>\n",
       "      <td>-5.175507</td>\n",
       "      <td>0.138892</td>\n",
       "      <td>1.171502</td>\n",
       "      <td>0</td>\n",
       "      <td>5.308256</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "   id          a         b          c  t   distance\n",
       "0   0   5.343934 -3.793345  -5.849364  0   8.784200\n",
       "1   1  -2.078382  0.795553  -3.898876  0   4.489300\n",
       "2   2  -6.923646  1.368068  -3.871322  0   8.049572\n",
       "3   3 -13.794147  4.336902  16.739670  1  22.120211\n",
       "4   4  -5.175507  0.138892   1.171502  0   5.308256"
      ]
     },
     "execution_count": 2,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "trainSet = pd.read_csv(\"Training-set.csv\")\n",
    "trainSet.head()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>id</th>\n",
       "      <th>a</th>\n",
       "      <th>b</th>\n",
       "      <th>c</th>\n",
       "      <th>distance</th>\n",
       "      <th>t</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>7000</td>\n",
       "      <td>-9.763325</td>\n",
       "      <td>-5.980476</td>\n",
       "      <td>-1.116588</td>\n",
       "      <td>11.503712</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>7001</td>\n",
       "      <td>5.717620</td>\n",
       "      <td>-4.977784</td>\n",
       "      <td>-9.168925</td>\n",
       "      <td>11.897003</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>7002</td>\n",
       "      <td>5.170640</td>\n",
       "      <td>-5.495420</td>\n",
       "      <td>-7.009920</td>\n",
       "      <td>10.299229</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>7003</td>\n",
       "      <td>-4.642903</td>\n",
       "      <td>-9.158488</td>\n",
       "      <td>-2.367180</td>\n",
       "      <td>10.537456</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>7004</td>\n",
       "      <td>8.303382</td>\n",
       "      <td>5.461795</td>\n",
       "      <td>1.792073</td>\n",
       "      <td>10.098955</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "     id         a         b         c   distance  t\n",
       "0  7000 -9.763325 -5.980476 -1.116588  11.503712  1\n",
       "1  7001  5.717620 -4.977784 -9.168925  11.897003  0\n",
       "2  7002  5.170640 -5.495420 -7.009920  10.299229  0\n",
       "3  7003 -4.642903 -9.158488 -2.367180  10.537456  0\n",
       "4  7004  8.303382  5.461795  1.792073  10.098955  0"
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "testSet = pd.read_csv(\"Testing-set.csv\")\n",
    "testSet.head()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "1    525\n",
       "0    475\n",
       "Name: t, dtype: int64"
      ]
     },
     "execution_count": 4,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Check how many instances there are of each class\n",
    "testSet[\"t\"].value_counts()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([[  5.34393442,  -3.79334507,  -5.84936405],\n",
       "       [ -2.07838159,   0.79555251,  -3.8988764 ],\n",
       "       [ -6.92364571,   1.36806794,  -3.87132155],\n",
       "       [-13.79414689,   4.33690176,  16.73966994],\n",
       "       [ -5.17550727,   0.13889184,   1.17150158],\n",
       "       [ -2.52705424,  -8.46192975,   8.78905177],\n",
       "       [ -8.12192239,  -1.25755102,   5.60437402],\n",
       "       [  8.38061351,  -1.44151983,  -3.02848235],\n",
       "       [ -1.59927128,  -4.55251467,   3.0050456 ],\n",
       "       [-21.34622842,  18.42139246, -18.39413667]])"
      ]
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "x_train = np.array(trainSet[[\"a\", \"b\", \"c\"]])\n",
    "x_train[0:10]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 48,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([[1, 0],\n",
       "       [1, 0],\n",
       "       [1, 0],\n",
       "       [0, 1],\n",
       "       [1, 0],\n",
       "       [0, 1],\n",
       "       [1, 0],\n",
       "       [1, 0],\n",
       "       [1, 0],\n",
       "       [0, 1]], dtype=int64)"
      ]
     },
     "execution_count": 48,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "TrainSetOneHotLabel = pd.read_csv(\"TrainSetOneHotLabel.csv\")\n",
    "y_train = np.array(TrainSetOneHotLabel)\n",
    "y_train[0:10]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([[-9.76332494, -5.98047647, -1.11658814],\n",
       "       [ 5.7176201 , -4.97778351, -9.16892473],\n",
       "       [ 5.17063988, -5.49541958, -7.00991964],\n",
       "       [-4.64290272, -9.15848767, -2.36717971],\n",
       "       [ 8.30338227,  5.46179545,  1.79207252]])"
      ]
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "testX = np.array(testSet[[\"a\", \"b\", \"c\"]])\n",
    "testX[0:5]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 52,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([[0, 1],\n",
       "       [1, 0],\n",
       "       [1, 0],\n",
       "       [1, 0],\n",
       "       [1, 0],\n",
       "       [1, 0],\n",
       "       [0, 1],\n",
       "       [1, 0],\n",
       "       [0, 1],\n",
       "       [1, 0]], dtype=int64)"
      ]
     },
     "execution_count": 52,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "TestingOneHotlabel = pd.read_csv(\"TestingOneHotlabel.csv\")\n",
    "testY = np.array(TestingOneHotlabel)\n",
    "testY[0:10]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([1, 0, 0, 0, 0, 0, 1, 0, 1, 0], dtype=int64)"
      ]
     },
     "execution_count": 9,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "np.argmax(testY[0:10], 1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "from sklearn import tree"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "clf = tree.DecisionTreeClassifier()\n",
    "clf.fit(trainSet[[\"a\", \"b\", \"c\"]], trainSet[\"t\"])\n",
    "treePredict = clf.predict(testSet[[\"a\", \"b\", \"c\"]])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "762"
      ]
     },
     "execution_count": 12,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "sum(treePredict==testSet[\"t\"])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "from sklearn.neighbors import KNeighborsClassifier"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 36,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "knn = KNeighborsClassifier(n_neighbors=1, p=2)\n",
    "knn.fit(trainSet[[\"a\", \"b\", \"c\"]], trainSet[\"t\"])\n",
    "knnpredict = knn.predict(testSet[[\"a\", \"b\", \"c\"]])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 37,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "772"
      ]
     },
     "execution_count": 37,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "sum(knnpredict==testSet[\"t\"])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 49,
   "metadata": {},
   "outputs": [],
   "source": [
    "x_up_train = trainSet.loc[trainSet[\"a\"]>0, [\"a\", \"b\", \"c\"]]\n",
    "y_up_train = TrainSetOneHotLabel.loc[trainSet[\"a\"]>0]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 51,
   "metadata": {},
   "outputs": [],
   "source": [
    "x_down_train = trainSet.loc[trainSet[\"a\"]<=0, [\"a\", \"b\", \"c\"]]\n",
    "y_down_train = TrainSetOneHotLabel.loc[trainSet[\"a\"]<=0]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 53,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "x_up_test = testSet.loc[testSet[\"a\"]>0, [\"a\", \"b\", \"c\"]]\n",
    "y_up_test = TestingOneHotlabel.loc[testSet[\"a\"]>0]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 54,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "x_down_test = testSet.loc[testSet[\"a\"]<=0, [\"a\", \"b\", \"c\"]]\n",
    "y_down_test = TestingOneHotlabel.loc[testSet[\"a\"]<=0]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 58,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(3462, 3) (3462, 2) (3538, 3) (3538, 2)\n",
      "(509, 3) (509, 2) (491, 3) (491, 2)\n"
     ]
    }
   ],
   "source": [
    "print(np.shape(x_up_train), np.shape(y_up_train), np.shape(x_down_train), np.shape(y_down_train))\n",
    "print(np.shape(x_up_test), np.shape(y_up_test), np.shape(x_down_test), np.shape(y_down_test))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 38,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "import tensorflow as tf"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 62,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "xTrain = tf.placeholder(dtype=tf.float32, shape=[None, 3])\n",
    "yTrain = tf.placeholder(dtype=tf.float32, shape=[None, 2])\n",
    "\n",
    "weight1 = tf.Variable(tf.random_normal([3, 33]))\n",
    "biases1 = tf.Variable(tf.zeros([1, 33]) + 0.1)\n",
    "weight2 = tf.Variable(tf.random_normal([33, 33]))\n",
    "biases2 = tf.Variable(tf.zeros([1, 33]) + 0.1)\n",
    "weight3 = tf.Variable(tf.random_normal([33, 33]))\n",
    "biases3 = tf.Variable(tf.zeros([1, 33]) + 0.1)\n",
    "weight4 = tf.Variable(tf.random_normal([33, 2]))\n",
    "biases4 = tf.Variable(tf.zeros([1, 2]) + 0.1)\n",
    "\n",
    "hidden1 = tf.matmul(xTrain, weight1) + biases1\n",
    "hidden2 = tf.matmul(tf.nn.relu(hidden1), weight2) + biases2\n",
    "hidden3 = tf.matmul(tf.nn.relu(hidden2), weight3) + biases3\n",
    "output = tf.matmul(tf.nn.relu(hidden3), weight4) + biases4\n",
    "\n",
    "predictions = tf.nn.softmax(output)\n",
    "\n",
    "cross_entropy = tf.reduce_mean(-tf.reduce_sum(yTrain * tf.log(tf.clip_by_value(predictions, 1e-10,1.0)), reduction_indices=[1]))\n",
    "train_step = tf.train.AdamOptimizer(0.001).minimize(cross_entropy)\n",
    "\n",
    "correct_predictions = tf.equal(tf.argmax(predictions, 1), tf.argmax(yTrain, 1))\n",
    "accuracy = tf.reduce_mean(tf.cast(correct_predictions, tf.float32))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 66,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "After 499 training steps, cross entropy on all train data is 0.443015\n",
      "After 499 training steps, accuracy on all train data is 0.930676\n",
      "After 499 training steps, accuracy on all test data is 0.56778\n",
      "After 999 training steps, cross entropy on all train data is 0.231165\n",
      "After 999 training steps, accuracy on all train data is 0.957828\n",
      "After 999 training steps, accuracy on all test data is 0.605108\n",
      "After 1499 training steps, cross entropy on all train data is 0.163212\n",
      "After 1499 training steps, accuracy on all train data is 0.964183\n",
      "After 1499 training steps, accuracy on all test data is 0.609037\n",
      "After 1999 training steps, cross entropy on all train data is 0.136502\n",
      "After 1999 training steps, accuracy on all train data is 0.973137\n",
      "After 1999 training steps, accuracy on all test data is 0.644401\n",
      "After 2499 training steps, cross entropy on all train data is 0.128072\n",
      "After 2499 training steps, accuracy on all train data is 0.974581\n",
      "After 2499 training steps, accuracy on all test data is 0.652259\n",
      "After 2999 training steps, cross entropy on all train data is 0.119027\n",
      "After 2999 training steps, accuracy on all train data is 0.976603\n",
      "After 2999 training steps, accuracy on all test data is 0.656189\n",
      "After 3499 training steps, cross entropy on all train data is 0.0904303\n",
      "After 3499 training steps, accuracy on all train data is 0.981802\n",
      "After 3499 training steps, accuracy on all test data is 0.6778\n",
      "After 3999 training steps, cross entropy on all train data is 0.0901465\n",
      "After 3999 training steps, accuracy on all train data is 0.983536\n",
      "After 3999 training steps, accuracy on all test data is 0.683694\n",
      "After 4499 training steps, cross entropy on all train data is 0.0635517\n",
      "After 4499 training steps, accuracy on all train data is 0.985557\n",
      "After 4499 training steps, accuracy on all test data is 0.685658\n",
      "After 4999 training steps, cross entropy on all train data is 0.0591968\n",
      "After 4999 training steps, accuracy on all train data is 0.986713\n",
      "After 4999 training steps, accuracy on all test data is 0.687623\n",
      "After 5499 training steps, cross entropy on all train data is 0.0565463\n",
      "After 5499 training steps, accuracy on all train data is 0.987579\n",
      "After 5499 training steps, accuracy on all test data is 0.693517\n",
      "After 5999 training steps, cross entropy on all train data is 0.0544938\n",
      "After 5999 training steps, accuracy on all train data is 0.988157\n",
      "After 5999 training steps, accuracy on all test data is 0.701375\n",
      "After 6499 training steps, cross entropy on all train data is 0.0589237\n",
      "After 6499 training steps, accuracy on all train data is 0.986424\n",
      "After 6499 training steps, accuracy on all test data is 0.695481\n",
      "After 6999 training steps, cross entropy on all train data is 0.0519623\n",
      "After 6999 training steps, accuracy on all train data is 0.988446\n",
      "After 6999 training steps, accuracy on all test data is 0.695481\n",
      "After 7499 training steps, cross entropy on all train data is 0.0507154\n",
      "After 7499 training steps, accuracy on all train data is 0.988735\n",
      "After 7499 training steps, accuracy on all test data is 0.697446\n",
      "After 7999 training steps, cross entropy on all train data is 0.0500489\n",
      "After 7999 training steps, accuracy on all train data is 0.989313\n",
      "After 7999 training steps, accuracy on all test data is 0.697446\n",
      "After 8499 training steps, cross entropy on all train data is 0.0488502\n",
      "After 8499 training steps, accuracy on all train data is 0.988735\n",
      "After 8499 training steps, accuracy on all test data is 0.699411\n",
      "After 8999 training steps, cross entropy on all train data is 0.0473975\n",
      "After 8999 training steps, accuracy on all train data is 0.990179\n",
      "After 8999 training steps, accuracy on all test data is 0.707269\n",
      "After 9499 training steps, cross entropy on all train data is 0.0467238\n",
      "After 9499 training steps, accuracy on all train data is 0.990468\n",
      "After 9499 training steps, accuracy on all test data is 0.709234\n",
      "After 9999 training steps, cross entropy on all train data is 0.0458879\n",
      "After 9999 training steps, accuracy on all train data is 0.990179\n",
      "After 9999 training steps, accuracy on all test data is 0.70334\n",
      "After 10499 training steps, cross entropy on all train data is 0.0451432\n",
      "After 10499 training steps, accuracy on all train data is 0.990468\n",
      "After 10499 training steps, accuracy on all test data is 0.709234\n",
      "After 10999 training steps, cross entropy on all train data is 0.0446942\n",
      "After 10999 training steps, accuracy on all train data is 0.990468\n",
      "After 10999 training steps, accuracy on all test data is 0.713163\n",
      "After 11499 training steps, cross entropy on all train data is 0.0441342\n",
      "After 11499 training steps, accuracy on all train data is 0.991046\n",
      "After 11499 training steps, accuracy on all test data is 0.713163\n",
      "After 11999 training steps, cross entropy on all train data is 0.0435192\n",
      "After 11999 training steps, accuracy on all train data is 0.991623\n",
      "After 11999 training steps, accuracy on all test data is 0.715128\n",
      "After 12499 training steps, cross entropy on all train data is 0.043339\n",
      "After 12499 training steps, accuracy on all train data is 0.991912\n",
      "After 12499 training steps, accuracy on all test data is 0.717092\n",
      "After 12999 training steps, cross entropy on all train data is 0.042983\n",
      "After 12999 training steps, accuracy on all train data is 0.991912\n",
      "After 12999 training steps, accuracy on all test data is 0.717092\n",
      "After 13499 training steps, cross entropy on all train data is 0.0426177\n",
      "After 13499 training steps, accuracy on all train data is 0.991912\n",
      "After 13499 training steps, accuracy on all test data is 0.717092\n",
      "After 13999 training steps, cross entropy on all train data is 0.0425141\n",
      "After 13999 training steps, accuracy on all train data is 0.99249\n",
      "After 13999 training steps, accuracy on all test data is 0.724951\n",
      "After 14499 training steps, cross entropy on all train data is 0.0420625\n",
      "After 14499 training steps, accuracy on all train data is 0.992201\n",
      "After 14499 training steps, accuracy on all test data is 0.721022\n",
      "After 14999 training steps, cross entropy on all train data is 0.0417375\n",
      "After 14999 training steps, accuracy on all train data is 0.992201\n",
      "After 14999 training steps, accuracy on all test data is 0.721022\n",
      "After 15499 training steps, cross entropy on all train data is 0.0415319\n",
      "After 15499 training steps, accuracy on all train data is 0.991912\n",
      "After 15499 training steps, accuracy on all test data is 0.717092\n",
      "After 15999 training steps, cross entropy on all train data is 0.0354875\n",
      "After 15999 training steps, accuracy on all train data is 0.991912\n",
      "After 15999 training steps, accuracy on all test data is 0.726916\n",
      "After 16499 training steps, cross entropy on all train data is 0.0333494\n",
      "After 16499 training steps, accuracy on all train data is 0.991912\n",
      "After 16499 training steps, accuracy on all test data is 0.726916\n",
      "After 16999 training steps, cross entropy on all train data is 0.0317197\n",
      "After 16999 training steps, accuracy on all train data is 0.992201\n",
      "After 16999 training steps, accuracy on all test data is 0.721022\n",
      "After 17499 training steps, cross entropy on all train data is 0.0274636\n",
      "After 17499 training steps, accuracy on all train data is 0.991912\n",
      "After 17499 training steps, accuracy on all test data is 0.715128\n",
      "After 17999 training steps, cross entropy on all train data is 0.0245188\n",
      "After 17999 training steps, accuracy on all train data is 0.993356\n",
      "After 17999 training steps, accuracy on all test data is 0.722986\n",
      "After 18499 training steps, cross entropy on all train data is 0.0233873\n",
      "After 18499 training steps, accuracy on all train data is 0.993356\n",
      "After 18499 training steps, accuracy on all test data is 0.721022\n",
      "After 18999 training steps, cross entropy on all train data is 0.02303\n",
      "After 18999 training steps, accuracy on all train data is 0.993356\n",
      "After 18999 training steps, accuracy on all test data is 0.717092\n",
      "After 19499 training steps, cross entropy on all train data is 0.0225228\n",
      "After 19499 training steps, accuracy on all train data is 0.993356\n",
      "After 19499 training steps, accuracy on all test data is 0.717092\n",
      "After 19999 training steps, cross entropy on all train data is 0.0220161\n",
      "After 19999 training steps, accuracy on all train data is 0.993356\n",
      "After 19999 training steps, accuracy on all test data is 0.715128\n",
      "After 20499 training steps, cross entropy on all train data is 0.0218597\n",
      "After 20499 training steps, accuracy on all train data is 0.993068\n",
      "After 20499 training steps, accuracy on all test data is 0.711198\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "After 20999 training steps, cross entropy on all train data is 0.0214083\n",
      "After 20999 training steps, accuracy on all train data is 0.993068\n",
      "After 20999 training steps, accuracy on all test data is 0.711198\n",
      "After 21499 training steps, cross entropy on all train data is 0.0210246\n",
      "After 21499 training steps, accuracy on all train data is 0.993356\n",
      "After 21499 training steps, accuracy on all test data is 0.715128\n",
      "After 21999 training steps, cross entropy on all train data is 0.0206081\n",
      "After 21999 training steps, accuracy on all train data is 0.993645\n",
      "After 21999 training steps, accuracy on all test data is 0.717092\n",
      "After 22499 training steps, cross entropy on all train data is 0.0231435\n",
      "After 22499 training steps, accuracy on all train data is 0.993068\n",
      "After 22499 training steps, accuracy on all test data is 0.715128\n",
      "After 22999 training steps, cross entropy on all train data is 0.0225243\n",
      "After 22999 training steps, accuracy on all train data is 0.993068\n",
      "After 22999 training steps, accuracy on all test data is 0.713163\n",
      "After 23499 training steps, cross entropy on all train data is 0.0220391\n",
      "After 23499 training steps, accuracy on all train data is 0.993068\n",
      "After 23499 training steps, accuracy on all test data is 0.711198\n",
      "After 23999 training steps, cross entropy on all train data is 0.021477\n",
      "After 23999 training steps, accuracy on all train data is 0.993068\n",
      "After 23999 training steps, accuracy on all test data is 0.711198\n",
      "After 24499 training steps, cross entropy on all train data is 0.0213467\n",
      "After 24499 training steps, accuracy on all train data is 0.993068\n",
      "After 24499 training steps, accuracy on all test data is 0.711198\n",
      "After 24999 training steps, cross entropy on all train data is 0.0209495\n",
      "After 24999 training steps, accuracy on all train data is 0.993068\n",
      "After 24999 training steps, accuracy on all test data is 0.713163\n",
      "After 25499 training steps, cross entropy on all train data is 0.02057\n",
      "After 25499 training steps, accuracy on all train data is 0.993068\n",
      "After 25499 training steps, accuracy on all test data is 0.713163\n",
      "After 25999 training steps, cross entropy on all train data is 0.0201816\n",
      "After 25999 training steps, accuracy on all train data is 0.992779\n",
      "After 25999 training steps, accuracy on all test data is 0.711198\n",
      "After 26499 training steps, cross entropy on all train data is 0.0192014\n",
      "After 26499 training steps, accuracy on all train data is 0.993068\n",
      "After 26499 training steps, accuracy on all test data is 0.713163\n",
      "After 26999 training steps, cross entropy on all train data is 0.0188522\n",
      "After 26999 training steps, accuracy on all train data is 0.993068\n",
      "After 26999 training steps, accuracy on all test data is 0.713163\n",
      "After 27499 training steps, cross entropy on all train data is 0.0185649\n",
      "After 27499 training steps, accuracy on all train data is 0.993068\n",
      "After 27499 training steps, accuracy on all test data is 0.713163\n",
      "After 27999 training steps, cross entropy on all train data is 0.018274\n",
      "After 27999 training steps, accuracy on all train data is 0.993068\n",
      "After 27999 training steps, accuracy on all test data is 0.713163\n",
      "After 28499 training steps, cross entropy on all train data is 0.0183184\n",
      "After 28499 training steps, accuracy on all train data is 0.993068\n",
      "After 28499 training steps, accuracy on all test data is 0.717092\n",
      "After 28999 training steps, cross entropy on all train data is 0.0180596\n",
      "After 28999 training steps, accuracy on all train data is 0.993068\n",
      "After 28999 training steps, accuracy on all test data is 0.715128\n",
      "After 29499 training steps, cross entropy on all train data is 0.0178541\n",
      "After 29499 training steps, accuracy on all train data is 0.993068\n",
      "After 29499 training steps, accuracy on all test data is 0.719057\n",
      "After 29999 training steps, cross entropy on all train data is 0.0176298\n",
      "After 29999 training steps, accuracy on all train data is 0.993068\n",
      "After 29999 training steps, accuracy on all test data is 0.717092\n",
      "After 30499 training steps, cross entropy on all train data is 0.0177269\n",
      "After 30499 training steps, accuracy on all train data is 0.993356\n",
      "After 30499 training steps, accuracy on all test data is 0.721022\n",
      "After 30999 training steps, cross entropy on all train data is 0.0174563\n",
      "After 30999 training steps, accuracy on all train data is 0.993068\n",
      "After 30999 training steps, accuracy on all test data is 0.715128\n",
      "After 31499 training steps, cross entropy on all train data is 0.0172723\n",
      "After 31499 training steps, accuracy on all train data is 0.993068\n",
      "After 31499 training steps, accuracy on all test data is 0.717092\n",
      "After 31999 training steps, cross entropy on all train data is 0.0170745\n",
      "After 31999 training steps, accuracy on all train data is 0.993356\n",
      "After 31999 training steps, accuracy on all test data is 0.721022\n",
      "After 32499 training steps, cross entropy on all train data is 0.0185281\n",
      "After 32499 training steps, accuracy on all train data is 0.993934\n",
      "After 32499 training steps, accuracy on all test data is 0.721022\n",
      "After 32999 training steps, cross entropy on all train data is 0.0178911\n",
      "After 32999 training steps, accuracy on all train data is 0.993356\n",
      "After 32999 training steps, accuracy on all test data is 0.717092\n",
      "After 33499 training steps, cross entropy on all train data is 0.0176087\n",
      "After 33499 training steps, accuracy on all train data is 0.993356\n",
      "After 33499 training steps, accuracy on all test data is 0.717092\n",
      "After 33999 training steps, cross entropy on all train data is 0.0173751\n",
      "After 33999 training steps, accuracy on all train data is 0.993356\n",
      "After 33999 training steps, accuracy on all test data is 0.719057\n",
      "After 34499 training steps, cross entropy on all train data is 0.0171526\n",
      "After 34499 training steps, accuracy on all train data is 0.993645\n",
      "After 34499 training steps, accuracy on all test data is 0.721022\n",
      "After 34999 training steps, cross entropy on all train data is 0.0171772\n",
      "After 34999 training steps, accuracy on all train data is 0.993645\n",
      "After 34999 training steps, accuracy on all test data is 0.719057\n",
      "After 35499 training steps, cross entropy on all train data is 0.0169599\n",
      "After 35499 training steps, accuracy on all train data is 0.993645\n",
      "After 35499 training steps, accuracy on all test data is 0.721022\n",
      "After 35999 training steps, cross entropy on all train data is 0.0167845\n",
      "After 35999 training steps, accuracy on all train data is 0.994223\n",
      "After 35999 training steps, accuracy on all test data is 0.722986\n",
      "After 36499 training steps, cross entropy on all train data is 0.0166012\n",
      "After 36499 training steps, accuracy on all train data is 0.994223\n",
      "After 36499 training steps, accuracy on all test data is 0.724951\n",
      "After 36999 training steps, cross entropy on all train data is 0.0167729\n",
      "After 36999 training steps, accuracy on all train data is 0.993934\n",
      "After 36999 training steps, accuracy on all test data is 0.721022\n",
      "After 37499 training steps, cross entropy on all train data is 0.0164709\n",
      "After 37499 training steps, accuracy on all train data is 0.994512\n",
      "After 37499 training steps, accuracy on all test data is 0.719057\n",
      "After 37999 training steps, cross entropy on all train data is 0.0162776\n",
      "After 37999 training steps, accuracy on all train data is 0.994512\n",
      "After 37999 training steps, accuracy on all test data is 0.721022\n",
      "After 38499 training steps, cross entropy on all train data is 0.0161007\n",
      "After 38499 training steps, accuracy on all train data is 0.994512\n",
      "After 38499 training steps, accuracy on all test data is 0.719057\n",
      "After 38999 training steps, cross entropy on all train data is 0.0511469\n",
      "After 38999 training steps, accuracy on all train data is 0.979492\n",
      "After 38999 training steps, accuracy on all test data is 0.683694\n",
      "After 39499 training steps, cross entropy on all train data is 0.0160283\n",
      "After 39499 training steps, accuracy on all train data is 0.994512\n",
      "After 39499 training steps, accuracy on all test data is 0.719057\n",
      "After 39999 training steps, cross entropy on all train data is 0.0158344\n",
      "After 39999 training steps, accuracy on all train data is 0.994512\n",
      "After 39999 training steps, accuracy on all test data is 0.717092\n",
      "After 40499 training steps, cross entropy on all train data is 0.015682\n",
      "After 40499 training steps, accuracy on all train data is 0.994512\n",
      "After 40499 training steps, accuracy on all test data is 0.717092\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "After 40999 training steps, cross entropy on all train data is 0.0155249\n",
      "After 40999 training steps, accuracy on all train data is 0.994512\n",
      "After 40999 training steps, accuracy on all test data is 0.719057\n",
      "After 41499 training steps, cross entropy on all train data is 0.0158233\n",
      "After 41499 training steps, accuracy on all train data is 0.994801\n",
      "After 41499 training steps, accuracy on all test data is 0.719057\n",
      "After 41999 training steps, cross entropy on all train data is 0.015528\n",
      "After 41999 training steps, accuracy on all train data is 0.994801\n",
      "After 41999 training steps, accuracy on all test data is 0.713163\n",
      "After 42499 training steps, cross entropy on all train data is 0.0153643\n",
      "After 42499 training steps, accuracy on all train data is 0.994801\n",
      "After 42499 training steps, accuracy on all test data is 0.717092\n",
      "After 42999 training steps, cross entropy on all train data is 0.0151966\n",
      "After 42999 training steps, accuracy on all train data is 0.994801\n",
      "After 42999 training steps, accuracy on all test data is 0.717092\n",
      "After 43499 training steps, cross entropy on all train data is 0.0150506\n",
      "After 43499 training steps, accuracy on all train data is 0.994801\n",
      "After 43499 training steps, accuracy on all test data is 0.719057\n",
      "After 43999 training steps, cross entropy on all train data is 0.0153736\n",
      "After 43999 training steps, accuracy on all train data is 0.99509\n",
      "After 43999 training steps, accuracy on all test data is 0.715128\n",
      "After 44499 training steps, cross entropy on all train data is 0.01511\n",
      "After 44499 training steps, accuracy on all train data is 0.99509\n",
      "After 44499 training steps, accuracy on all test data is 0.713163\n",
      "After 44999 training steps, cross entropy on all train data is 0.0149413\n",
      "After 44999 training steps, accuracy on all train data is 0.99509\n",
      "After 44999 training steps, accuracy on all test data is 0.713163\n",
      "After 45499 training steps, cross entropy on all train data is 0.0148007\n",
      "After 45499 training steps, accuracy on all train data is 0.99509\n",
      "After 45499 training steps, accuracy on all test data is 0.713163\n",
      "After 45999 training steps, cross entropy on all train data is 0.0146684\n",
      "After 45999 training steps, accuracy on all train data is 0.99509\n",
      "After 45999 training steps, accuracy on all test data is 0.717092\n",
      "After 46499 training steps, cross entropy on all train data is 0.0151219\n",
      "After 46499 training steps, accuracy on all train data is 0.99509\n",
      "After 46499 training steps, accuracy on all test data is 0.717092\n",
      "After 46999 training steps, cross entropy on all train data is 0.0148151\n",
      "After 46999 training steps, accuracy on all train data is 0.99509\n",
      "After 46999 training steps, accuracy on all test data is 0.715128\n",
      "After 47499 training steps, cross entropy on all train data is 0.0146367\n",
      "After 47499 training steps, accuracy on all train data is 0.99509\n",
      "After 47499 training steps, accuracy on all test data is 0.713163\n",
      "After 47999 training steps, cross entropy on all train data is 0.0144852\n",
      "After 47999 training steps, accuracy on all train data is 0.99509\n",
      "After 47999 training steps, accuracy on all test data is 0.713163\n",
      "After 48499 training steps, cross entropy on all train data is 0.0143561\n",
      "After 48499 training steps, accuracy on all train data is 0.99509\n",
      "After 48499 training steps, accuracy on all test data is 0.713163\n",
      "After 48999 training steps, cross entropy on all train data is 0.0145266\n",
      "After 48999 training steps, accuracy on all train data is 0.99509\n",
      "After 48999 training steps, accuracy on all test data is 0.721022\n",
      "After 49499 training steps, cross entropy on all train data is 0.014229\n",
      "After 49499 training steps, accuracy on all train data is 0.99509\n",
      "After 49499 training steps, accuracy on all test data is 0.717092\n",
      "After 49999 training steps, cross entropy on all train data is 0.0140615\n",
      "After 49999 training steps, accuracy on all train data is 0.99509\n",
      "After 49999 training steps, accuracy on all test data is 0.719057\n"
     ]
    }
   ],
   "source": [
    "# Train the network on the 'up' split with full-batch gradient steps,\n",
    "# recording the training loss every step and printing metrics every 500 steps.\n",
    "Steps = 50000\n",
    "lossup = []\n",
    "\n",
    "init = tf.global_variables_initializer()\n",
    "with tf.Session() as sess:\n",
    "    sess.run(init)\n",
    "    for i in range(Steps):\n",
    "        sess.run(train_step, feed_dict={xTrain: x_up_train, yTrain: y_up_train})\n",
    "        # BUG FIX: this cell trains on the 'up' split, but previously the\n",
    "        # recorded loss was evaluated on x_down_train/y_down_train (a\n",
    "        # copy-paste from the 'down' cell, which records down loss while\n",
    "        # training on down). Record the loss of the data actually trained on.\n",
    "        lossup.append(sess.run(cross_entropy, feed_dict={xTrain: x_up_train, yTrain: y_up_train}))\n",
    "\n",
    "        if (i + 1) % 500 == 0:\n",
    "            # Report i+1 = number of completed steps, fixing the off-by-one\n",
    "            # in the progress messages (previously printed 499, 999, ...).\n",
    "            total_cross_entropy = sess.run(cross_entropy, feed_dict={xTrain: x_up_train, yTrain: y_up_train})\n",
    "            print('After %d training steps, cross entropy on all train data is %g' % (i + 1, total_cross_entropy))\n",
    "            total_train_accuracy = sess.run(accuracy, feed_dict={xTrain: x_up_train, yTrain: y_up_train})\n",
    "            print('After %d training steps, accuracy on all train data is %g' % (i + 1, total_train_accuracy))\n",
    "            total_test_accuracy = sess.run(accuracy, feed_dict={xTrain: x_up_test, yTrain: y_up_test})\n",
    "            print('After %d training steps, accuracy on all test data is %g' % (i + 1, total_test_accuracy))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 67,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "After 499 training steps, cross entropy on all train data is 3.51754\n",
      "After 499 training steps, accuracy on all train data is 0.817976\n",
      "After 499 training steps, accuracy on all test data is 0.584521\n",
      "After 999 training steps, cross entropy on all train data is 0.417077\n",
      "After 999 training steps, accuracy on all train data is 0.955625\n",
      "After 999 training steps, accuracy on all test data is 0.655804\n",
      "After 1499 training steps, cross entropy on all train data is 0.380236\n",
      "After 1499 training steps, accuracy on all train data is 0.959582\n",
      "After 1499 training steps, accuracy on all test data is 0.663951\n",
      "After 1999 training steps, cross entropy on all train data is 0.352819\n",
      "After 1999 training steps, accuracy on all train data is 0.964104\n",
      "After 1999 training steps, accuracy on all test data is 0.670061\n",
      "After 2499 training steps, cross entropy on all train data is 0.343169\n",
      "After 2499 training steps, accuracy on all train data is 0.966083\n",
      "After 2499 training steps, accuracy on all test data is 0.684318\n",
      "After 2999 training steps, cross entropy on all train data is 0.335154\n",
      "After 2999 training steps, accuracy on all train data is 0.966365\n",
      "After 2999 training steps, accuracy on all test data is 0.694501\n",
      "After 3499 training steps, cross entropy on all train data is 0.322144\n",
      "After 3499 training steps, accuracy on all train data is 0.969192\n",
      "After 3499 training steps, accuracy on all test data is 0.700611\n",
      "After 3999 training steps, cross entropy on all train data is 0.318793\n",
      "After 3999 training steps, accuracy on all train data is 0.969192\n",
      "After 3999 training steps, accuracy on all test data is 0.702648\n",
      "After 4499 training steps, cross entropy on all train data is 0.294018\n",
      "After 4499 training steps, accuracy on all train data is 0.970322\n",
      "After 4499 training steps, accuracy on all test data is 0.700611\n",
      "After 4999 training steps, cross entropy on all train data is 0.287109\n",
      "After 4999 training steps, accuracy on all train data is 0.972583\n",
      "After 4999 training steps, accuracy on all test data is 0.708758\n",
      "After 5499 training steps, cross entropy on all train data is 0.284444\n",
      "After 5499 training steps, accuracy on all train data is 0.973431\n",
      "After 5499 training steps, accuracy on all test data is 0.720978\n",
      "After 5999 training steps, cross entropy on all train data is 0.242265\n",
      "After 5999 training steps, accuracy on all train data is 0.974279\n",
      "After 5999 training steps, accuracy on all test data is 0.710794\n",
      "After 6499 training steps, cross entropy on all train data is 0.231061\n",
      "After 6499 training steps, accuracy on all train data is 0.974845\n",
      "After 6499 training steps, accuracy on all test data is 0.704684\n",
      "After 6999 training steps, cross entropy on all train data is 0.205131\n",
      "After 6999 training steps, accuracy on all train data is 0.968626\n",
      "After 6999 training steps, accuracy on all test data is 0.682281\n",
      "After 7499 training steps, cross entropy on all train data is 0.179298\n",
      "After 7499 training steps, accuracy on all train data is 0.977388\n",
      "After 7499 training steps, accuracy on all test data is 0.692464\n",
      "After 7999 training steps, cross entropy on all train data is 0.173438\n",
      "After 7999 training steps, accuracy on all train data is 0.976823\n",
      "After 7999 training steps, accuracy on all test data is 0.706721\n",
      "After 8499 training steps, cross entropy on all train data is 0.167732\n",
      "After 8499 training steps, accuracy on all train data is 0.978519\n",
      "After 8499 training steps, accuracy on all test data is 0.704684\n",
      "After 8999 training steps, cross entropy on all train data is 0.165982\n",
      "After 8999 training steps, accuracy on all train data is 0.97965\n",
      "After 8999 training steps, accuracy on all test data is 0.714868\n",
      "After 9499 training steps, cross entropy on all train data is 0.165059\n",
      "After 9499 training steps, accuracy on all train data is 0.97965\n",
      "After 9499 training steps, accuracy on all test data is 0.712831\n",
      "After 9999 training steps, cross entropy on all train data is 0.163789\n",
      "After 9999 training steps, accuracy on all train data is 0.980497\n",
      "After 9999 training steps, accuracy on all test data is 0.712831\n",
      "After 10499 training steps, cross entropy on all train data is 0.156366\n",
      "After 10499 training steps, accuracy on all train data is 0.981628\n",
      "After 10499 training steps, accuracy on all test data is 0.710794\n",
      "After 10999 training steps, cross entropy on all train data is 0.21797\n",
      "After 10999 training steps, accuracy on all train data is 0.959016\n",
      "After 10999 training steps, accuracy on all test data is 0.635438\n",
      "After 11499 training steps, cross entropy on all train data is 0.248358\n",
      "After 11499 training steps, accuracy on all train data is 0.95082\n",
      "After 11499 training steps, accuracy on all test data is 0.608961\n",
      "After 11999 training steps, cross entropy on all train data is 0.143803\n",
      "After 11999 training steps, accuracy on all train data is 0.981628\n",
      "After 11999 training steps, accuracy on all test data is 0.725051\n",
      "After 12499 training steps, cross entropy on all train data is 0.136701\n",
      "After 12499 training steps, accuracy on all train data is 0.981911\n",
      "After 12499 training steps, accuracy on all test data is 0.731161\n",
      "After 12999 training steps, cross entropy on all train data is 0.134701\n",
      "After 12999 training steps, accuracy on all train data is 0.982193\n",
      "After 12999 training steps, accuracy on all test data is 0.731161\n",
      "After 13499 training steps, cross entropy on all train data is 0.133676\n",
      "After 13499 training steps, accuracy on all train data is 0.983889\n",
      "After 13499 training steps, accuracy on all test data is 0.733198\n",
      "After 13999 training steps, cross entropy on all train data is 0.13262\n",
      "After 13999 training steps, accuracy on all train data is 0.983889\n",
      "After 13999 training steps, accuracy on all test data is 0.745418\n",
      "After 14499 training steps, cross entropy on all train data is 0.137585\n",
      "After 14499 training steps, accuracy on all train data is 0.979367\n",
      "After 14499 training steps, accuracy on all test data is 0.710794\n",
      "After 14999 training steps, cross entropy on all train data is 0.131172\n",
      "After 14999 training steps, accuracy on all train data is 0.983607\n",
      "After 14999 training steps, accuracy on all test data is 0.735234\n",
      "After 15499 training steps, cross entropy on all train data is 0.130529\n",
      "After 15499 training steps, accuracy on all train data is 0.984172\n",
      "After 15499 training steps, accuracy on all test data is 0.739308\n",
      "After 15999 training steps, cross entropy on all train data is 0.13002\n",
      "After 15999 training steps, accuracy on all train data is 0.984172\n",
      "After 15999 training steps, accuracy on all test data is 0.737271\n",
      "After 16499 training steps, cross entropy on all train data is 0.129492\n",
      "After 16499 training steps, accuracy on all train data is 0.983889\n",
      "After 16499 training steps, accuracy on all test data is 0.735234\n",
      "After 16999 training steps, cross entropy on all train data is 0.129099\n",
      "After 16999 training steps, accuracy on all train data is 0.984172\n",
      "After 16999 training steps, accuracy on all test data is 0.733198\n",
      "After 17499 training steps, cross entropy on all train data is 0.128124\n",
      "After 17499 training steps, accuracy on all train data is 0.984455\n",
      "After 17499 training steps, accuracy on all test data is 0.735234\n",
      "After 17999 training steps, cross entropy on all train data is 0.12763\n",
      "After 17999 training steps, accuracy on all train data is 0.984737\n",
      "After 17999 training steps, accuracy on all test data is 0.735234\n",
      "After 18499 training steps, cross entropy on all train data is 0.127223\n",
      "After 18499 training steps, accuracy on all train data is 0.98502\n",
      "After 18499 training steps, accuracy on all test data is 0.735234\n",
      "After 18999 training steps, cross entropy on all train data is 0.126949\n",
      "After 18999 training steps, accuracy on all train data is 0.985302\n",
      "After 18999 training steps, accuracy on all test data is 0.735234\n",
      "After 19499 training steps, cross entropy on all train data is 0.126547\n",
      "After 19499 training steps, accuracy on all train data is 0.985585\n",
      "After 19499 training steps, accuracy on all test data is 0.739308\n",
      "After 19999 training steps, cross entropy on all train data is 0.128142\n",
      "After 19999 training steps, accuracy on all train data is 0.985302\n",
      "After 19999 training steps, accuracy on all test data is 0.737271\n",
      "After 20499 training steps, cross entropy on all train data is 0.126\n",
      "After 20499 training steps, accuracy on all train data is 0.985585\n",
      "After 20499 training steps, accuracy on all test data is 0.737271\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "After 20999 training steps, cross entropy on all train data is 0.129285\n",
      "After 20999 training steps, accuracy on all train data is 0.984172\n",
      "After 20999 training steps, accuracy on all test data is 0.718941\n",
      "After 21499 training steps, cross entropy on all train data is 0.125472\n",
      "After 21499 training steps, accuracy on all train data is 0.985868\n",
      "After 21499 training steps, accuracy on all test data is 0.731161\n",
      "After 21999 training steps, cross entropy on all train data is 0.125472\n",
      "After 21999 training steps, accuracy on all train data is 0.985585\n",
      "After 21999 training steps, accuracy on all test data is 0.731161\n",
      "After 22499 training steps, cross entropy on all train data is 0.125009\n",
      "After 22499 training steps, accuracy on all train data is 0.985868\n",
      "After 22499 training steps, accuracy on all test data is 0.731161\n",
      "After 22999 training steps, cross entropy on all train data is 0.125027\n",
      "After 22999 training steps, accuracy on all train data is 0.985585\n",
      "After 22999 training steps, accuracy on all test data is 0.731161\n",
      "After 23499 training steps, cross entropy on all train data is 0.124546\n",
      "After 23499 training steps, accuracy on all train data is 0.985868\n",
      "After 23499 training steps, accuracy on all test data is 0.729124\n",
      "After 23999 training steps, cross entropy on all train data is 0.124335\n",
      "After 23999 training steps, accuracy on all train data is 0.985868\n",
      "After 23999 training steps, accuracy on all test data is 0.731161\n",
      "After 24499 training steps, cross entropy on all train data is 0.124115\n",
      "After 24499 training steps, accuracy on all train data is 0.985868\n",
      "After 24499 training steps, accuracy on all test data is 0.731161\n",
      "After 24999 training steps, cross entropy on all train data is 0.117709\n",
      "After 24999 training steps, accuracy on all train data is 0.985868\n",
      "After 24999 training steps, accuracy on all test data is 0.727088\n",
      "After 25499 training steps, cross entropy on all train data is 0.11725\n",
      "After 25499 training steps, accuracy on all train data is 0.98615\n",
      "After 25499 training steps, accuracy on all test data is 0.729124\n",
      "After 25999 training steps, cross entropy on all train data is 0.117204\n",
      "After 25999 training steps, accuracy on all train data is 0.985868\n",
      "After 25999 training steps, accuracy on all test data is 0.727088\n",
      "After 26499 training steps, cross entropy on all train data is 0.143006\n",
      "After 26499 training steps, accuracy on all train data is 0.976823\n",
      "After 26499 training steps, accuracy on all test data is 0.698574\n",
      "After 26999 training steps, cross entropy on all train data is 0.116725\n",
      "After 26999 training steps, accuracy on all train data is 0.98615\n",
      "After 26999 training steps, accuracy on all test data is 0.731161\n",
      "After 27499 training steps, cross entropy on all train data is 0.116756\n",
      "After 27499 training steps, accuracy on all train data is 0.985868\n",
      "After 27499 training steps, accuracy on all test data is 0.727088\n",
      "After 27999 training steps, cross entropy on all train data is 0.116441\n",
      "After 27999 training steps, accuracy on all train data is 0.986433\n",
      "After 27999 training steps, accuracy on all test data is 0.735234\n",
      "After 28499 training steps, cross entropy on all train data is 0.110323\n",
      "After 28499 training steps, accuracy on all train data is 0.98615\n",
      "After 28499 training steps, accuracy on all test data is 0.729124\n",
      "After 28999 training steps, cross entropy on all train data is 0.109639\n",
      "After 28999 training steps, accuracy on all train data is 0.986716\n",
      "After 28999 training steps, accuracy on all test data is 0.735234\n",
      "After 29499 training steps, cross entropy on all train data is 0.129863\n",
      "After 29499 training steps, accuracy on all train data is 0.97965\n",
      "After 29499 training steps, accuracy on all test data is 0.710794\n",
      "After 29999 training steps, cross entropy on all train data is 0.10936\n",
      "After 29999 training steps, accuracy on all train data is 0.986716\n",
      "After 29999 training steps, accuracy on all test data is 0.733198\n",
      "After 30499 training steps, cross entropy on all train data is 0.109148\n",
      "After 30499 training steps, accuracy on all train data is 0.986998\n",
      "After 30499 training steps, accuracy on all test data is 0.735234\n",
      "After 30999 training steps, cross entropy on all train data is 0.0961723\n",
      "After 30999 training steps, accuracy on all train data is 0.987281\n",
      "After 30999 training steps, accuracy on all test data is 0.733198\n",
      "After 31499 training steps, cross entropy on all train data is 0.115948\n",
      "After 31499 training steps, accuracy on all train data is 0.980215\n",
      "After 31499 training steps, accuracy on all test data is 0.714868\n",
      "After 31999 training steps, cross entropy on all train data is 0.0958496\n",
      "After 31999 training steps, accuracy on all train data is 0.987564\n",
      "After 31999 training steps, accuracy on all test data is 0.735234\n",
      "After 32499 training steps, cross entropy on all train data is 0.109884\n",
      "After 32499 training steps, accuracy on all train data is 0.981345\n",
      "After 32499 training steps, accuracy on all test data is 0.720978\n",
      "After 32999 training steps, cross entropy on all train data is 0.0956132\n",
      "After 32999 training steps, accuracy on all train data is 0.987281\n",
      "After 32999 training steps, accuracy on all test data is 0.731161\n",
      "After 33499 training steps, cross entropy on all train data is 0.125078\n",
      "After 33499 training steps, accuracy on all train data is 0.975693\n",
      "After 33499 training steps, accuracy on all test data is 0.714868\n",
      "After 33999 training steps, cross entropy on all train data is 0.0953752\n",
      "After 33999 training steps, accuracy on all train data is 0.987564\n",
      "After 33999 training steps, accuracy on all test data is 0.735234\n",
      "After 34499 training steps, cross entropy on all train data is 0.0952031\n",
      "After 34499 training steps, accuracy on all train data is 0.987564\n",
      "After 34499 training steps, accuracy on all test data is 0.741344\n",
      "After 34999 training steps, cross entropy on all train data is 0.0951658\n",
      "After 34999 training steps, accuracy on all train data is 0.987846\n",
      "After 34999 training steps, accuracy on all test data is 0.741344\n",
      "After 35499 training steps, cross entropy on all train data is 0.0949631\n",
      "After 35499 training steps, accuracy on all train data is 0.987846\n",
      "After 35499 training steps, accuracy on all test data is 0.743381\n",
      "After 35999 training steps, cross entropy on all train data is 0.0948942\n",
      "After 35999 training steps, accuracy on all train data is 0.987846\n",
      "After 35999 training steps, accuracy on all test data is 0.743381\n",
      "After 36499 training steps, cross entropy on all train data is 0.102033\n",
      "After 36499 training steps, accuracy on all train data is 0.985868\n",
      "After 36499 training steps, accuracy on all test data is 0.737271\n",
      "After 36999 training steps, cross entropy on all train data is 0.0947164\n",
      "After 36999 training steps, accuracy on all train data is 0.988129\n",
      "After 36999 training steps, accuracy on all test data is 0.743381\n",
      "After 37499 training steps, cross entropy on all train data is 0.123302\n",
      "After 37499 training steps, accuracy on all train data is 0.977671\n",
      "After 37499 training steps, accuracy on all test data is 0.720978\n",
      "After 37999 training steps, cross entropy on all train data is 0.0946517\n",
      "After 37999 training steps, accuracy on all train data is 0.987846\n",
      "After 37999 training steps, accuracy on all test data is 0.743381\n",
      "After 38499 training steps, cross entropy on all train data is 0.0944159\n",
      "After 38499 training steps, accuracy on all train data is 0.988129\n",
      "After 38499 training steps, accuracy on all test data is 0.745418\n",
      "After 38999 training steps, cross entropy on all train data is 0.0943116\n",
      "After 38999 training steps, accuracy on all train data is 0.988412\n",
      "After 38999 training steps, accuracy on all test data is 0.745418\n",
      "After 39499 training steps, cross entropy on all train data is 0.0944145\n",
      "After 39499 training steps, accuracy on all train data is 0.988129\n",
      "After 39499 training steps, accuracy on all test data is 0.743381\n",
      "After 39999 training steps, cross entropy on all train data is 0.0941695\n",
      "After 39999 training steps, accuracy on all train data is 0.988412\n",
      "After 39999 training steps, accuracy on all test data is 0.745418\n",
      "After 40499 training steps, cross entropy on all train data is 0.0945857\n",
      "After 40499 training steps, accuracy on all train data is 0.988412\n",
      "After 40499 training steps, accuracy on all test data is 0.745418\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "After 40999 training steps, cross entropy on all train data is 0.0940507\n",
      "After 40999 training steps, accuracy on all train data is 0.988412\n",
      "After 40999 training steps, accuracy on all test data is 0.745418\n",
      "After 41499 training steps, cross entropy on all train data is 0.0939016\n",
      "After 41499 training steps, accuracy on all train data is 0.988694\n",
      "After 41499 training steps, accuracy on all test data is 0.747454\n",
      "After 41999 training steps, cross entropy on all train data is 0.093861\n",
      "After 41999 training steps, accuracy on all train data is 0.988412\n",
      "After 41999 training steps, accuracy on all test data is 0.745418\n",
      "After 42499 training steps, cross entropy on all train data is 0.105694\n",
      "After 42499 training steps, accuracy on all train data is 0.983607\n",
      "After 42499 training steps, accuracy on all test data is 0.729124\n",
      "After 42999 training steps, cross entropy on all train data is 0.0937497\n",
      "After 42999 training steps, accuracy on all train data is 0.988694\n",
      "After 42999 training steps, accuracy on all test data is 0.747454\n",
      "After 43499 training steps, cross entropy on all train data is 0.0935637\n",
      "After 43499 training steps, accuracy on all train data is 0.988977\n",
      "After 43499 training steps, accuracy on all test data is 0.749491\n",
      "After 43999 training steps, cross entropy on all train data is 0.0935972\n",
      "After 43999 training steps, accuracy on all train data is 0.988694\n",
      "After 43999 training steps, accuracy on all test data is 0.747454\n",
      "After 44499 training steps, cross entropy on all train data is 0.0934149\n",
      "After 44499 training steps, accuracy on all train data is 0.988977\n",
      "After 44499 training steps, accuracy on all test data is 0.749491\n",
      "After 44999 training steps, cross entropy on all train data is 0.0934856\n",
      "After 44999 training steps, accuracy on all train data is 0.988694\n",
      "After 44999 training steps, accuracy on all test data is 0.745418\n",
      "After 45499 training steps, cross entropy on all train data is 0.0932607\n",
      "After 45499 training steps, accuracy on all train data is 0.988977\n",
      "After 45499 training steps, accuracy on all test data is 0.749491\n",
      "After 45999 training steps, cross entropy on all train data is 0.0934647\n",
      "After 45999 training steps, accuracy on all train data is 0.988412\n",
      "After 45999 training steps, accuracy on all test data is 0.751527\n",
      "After 46499 training steps, cross entropy on all train data is 0.0931281\n",
      "After 46499 training steps, accuracy on all train data is 0.988977\n",
      "After 46499 training steps, accuracy on all test data is 0.749491\n",
      "After 46999 training steps, cross entropy on all train data is 0.0965367\n",
      "After 46999 training steps, accuracy on all train data is 0.987281\n",
      "After 46999 training steps, accuracy on all test data is 0.745418\n",
      "After 47499 training steps, cross entropy on all train data is 0.0929983\n",
      "After 47499 training steps, accuracy on all train data is 0.988977\n",
      "After 47499 training steps, accuracy on all test data is 0.747454\n",
      "After 47999 training steps, cross entropy on all train data is 0.0928653\n",
      "After 47999 training steps, accuracy on all train data is 0.988977\n",
      "After 47999 training steps, accuracy on all test data is 0.749491\n",
      "After 48499 training steps, cross entropy on all train data is 0.0929148\n",
      "After 48499 training steps, accuracy on all train data is 0.988977\n",
      "After 48499 training steps, accuracy on all test data is 0.749491\n",
      "After 48999 training steps, cross entropy on all train data is 0.0927683\n",
      "After 48999 training steps, accuracy on all train data is 0.988977\n",
      "After 48999 training steps, accuracy on all test data is 0.747454\n",
      "After 49499 training steps, cross entropy on all train data is 0.0928114\n",
      "After 49499 training steps, accuracy on all train data is 0.988977\n",
      "After 49499 training steps, accuracy on all test data is 0.747454\n",
      "After 49999 training steps, cross entropy on all train data is 0.0926346\n",
      "After 49999 training steps, accuracy on all train data is 0.988977\n",
      "After 49999 training steps, accuracy on all test data is 0.745418\n"
     ]
    }
   ],
   "source": [
    "# batch_size = 10\n",
    "Steps = 50000\n",
    "lossdown = []\n",
    "# start = (i*batch_size)%1000\n",
    "# end = min(start+batch_size, 1000)\n",
    "\n",
    "init = tf.global_variables_initializer()\n",
    "with tf.Session() as sess:\n",
    "    sess.run(init)\n",
    "    for i in range(Steps):\n",
    "        \n",
    "        sess.run(train_step, feed_dict={ xTrain:x_down_train, yTrain:y_down_train})\n",
    "        lossdown.append(sess.run(cross_entropy, feed_dict={xTrain:x_down_train, yTrain:y_down_train}))\n",
    "        \n",
    "        if (i+1) % 500==0:\n",
    "            total_cross_entropy = sess.run(cross_entropy, feed_dict={xTrain:x_down_train, yTrain:y_down_train})\n",
    "            print('After %d training steps, cross entropy on all train data is %g' % (i, total_cross_entropy))\n",
    "            total_train_accuracy = sess.run(accuracy, feed_dict={xTrain:x_down_train, yTrain:y_down_train})\n",
    "            print('After %d training steps, accuracy on all train data is %g' % (i, total_train_accuracy))\n",
    "            total_test_accuracy = sess.run(accuracy, feed_dict={xTrain:x_down_test, yTrain:y_down_test})\n",
    "            print('After %d training steps, accuracy on all test data is %g' % (i, total_test_accuracy))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 69,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "from matplotlib import pyplot as plt"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 70,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAA4IAAAHwCAYAAADzfNGCAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3Xd8nNWd7/Hvb9Qsy02y5N5xAdMMOMammB56CCTZQF4h\nIWHjFLIpu+EuSRaSsMkNKTd7b5LdAGmkUhJqYjA1dDAY2xj3XiTbalbvmjn3jxmNJdmyZXvmPI+k\nz/v10kszzzwzz0+yLc9X55zfMeecAAAAAAADRyToAgAAAAAAfhEEAQAAAGCAIQgCAAAAwABDEAQA\nAACAAYYgCAAAAAADDEEQAAAAAAYYgiAAYMAyswwzqzezSak89yjq+K6Z3Zfq1wUAoCeZQRcAAEBv\nmVl9p7uDJbVIiibuf9Y596cjeT3nXFTSkFSfCwBA2BEEAQB9hnMuGcTMbLukf3bOPdfT+WaW6Zxr\n91EbAAB9CVNDAQD9RmKK5YNmdr+Z1Un6uJktMLM3zazazPaY2U/NLCtxfqaZOTObkrj/x8TjT5lZ\nnZm9YWZTj/TcxOOXm9lGM6sxs5+Z2WtmdlMvv45rzWxNouYXzGxWp8e+YWa7zazWzNab2fmJ4/PN\nbHnieKmZ/SgF31IAQD9FEAQA9DfXSvqzpOGSHpTULunLkgolnS3pMkmfPcTzPybpdkkFknZK+s8j\nPdfMRkl6SNKtietukzSvN8Wb2QmS/iDpXyQVSXpO0hNmlmVmJyZqP905N0zS5YnrStLPJP0ocXy6\npL/25noAgIGJIAgA6G9edc79zTkXc841Oefeds4tdc61O+e2SrpX0nmHeP5fnXPLnHNtkv4kac5R\nnHuVpJXOuccTj/2XpIpe1n+9pCeccy8knnuX4qH2TMVD7SBJJyamvW5LfE2S1CZphpmNdM7VOeeW\n9vJ6AIABiCAIAOhvdnW+Y2bHm9liM9trZrWS7lR8lK4nezvdbtShG8T0dO64znU455yk4l7U3vHc\nHZ2eG0s8d7xzboOkf1P8ayhLTIEdkzj1U5JmS9pgZm+Z2RW9vB4AYAAiCAIA+hvX7f49klZLmp6Y\nNnmHJEtzDXskTei4Y2YmaXwvn7tb0uROz40kXqtEkpxzf3TOnS1pqqQMSd9PHN/gnLte0ihJ/0fS\nw2Y26Ni/FABAf0QQBAD0d0Ml1UhqSKy/O9T6wFT5u6TTzexqM8tUfI1iUS+f+5CkD5jZ+YmmNrdK\nqpO01MxOMLMLzCxHUlPiIyZJZnajmRUmRhBrFA/EsdR+WQCA/oIgCADo7/5N0icVD1P3KN5AJq2c\nc6WSPirpJ5IqJR0naYXi+x4e7rlrFK/3F5LKFW9u84HEesEcST9UfL3hXkn5kr6ZeOoVktYluqX+\nWNJHnXOtKfyyAAD9iMWXLQAAgHQxswzFp3x+2Dn3StD1AADAiCAAAGlgZpeZ2YjENM7bFe/q+VbA\nZQEAIIkgCABAupwjaavi0zsvlXStc+6wU0MBAPCBqaEAAAAAMMAwIggAAAAAAwxBEAAAAAAGmMyg\nC0ilwsJCN2XKlKDLAAAAAIBAvPPOOxXOucPuXduvguCUKVO0bNmyoMsAAAAAgECY2Y7enMfUUAAA\nAAAYYAiCAAAAADDApC0ImtlvzKzMzFZ3OvYRM1tjZjEzm3uI5243s/fMbKWZMdcTAAAAAFIonSOC\n90m6rNux1ZKuk/RyL55/gXNujnOux8AIAAAAADhyaWsW45x72cymdDu2TpLMLF2XBQAAAAAcRljX\nCDpJz5nZO2a2KOhiAAAAAKA/Cev2Eec450rMbJSkZ81svXPuoNNJE0FxkSRNmjTJZ40AAAAA0CeF\nckTQOVeS+Fwm6VFJ8w5x7r3OubnOublFRYfdNxEAAAAABrzQBUEzyzOzoR23Jb1f8SYzAAAAAIAU\nSOf2EfdLekPSLDMrNrObzexaMyuWtE
DSYjN7OnHuODN7MvHU0ZJeNbN3Jb0labFzbkm66gQAAACA\ngSadXUNv6OGhRw9y7m5JVyRub5V0arrqAgAAAICBLnRTQwEAAAAA6UUQBAAAAIABhiAIAAAAAAMM\nQRAAAAAABhiCIAAAAAAMMATBNIrGnGoa29TSHg26FAAAAABIIgimUUlVk0698xn97d09QZcCAAAA\nAEkEwTQyi392zgVbCAAAAAB0QhBMo/ZYPAAu31kVcCUAAAAAsB9BMI12VDZIku5/a1fAlQAAAADA\nfgTBNJo5eqgk6YJZRQFXAgAAAAD7EQTTKC8nU5I0ODsz4EoAAAAAYD+CYBpFEs1iFr+3R9WNrcEW\nAwAAAAAJBME0so62oZIeW1ESYCUAAAAAsB9BMI0i+3OgXt1cEVwhAAAAANAJQTCNTPuTYGNrNMBK\nAAAAAGA/gmAadZoZqhibygMAAAAICYJgGnUOguNG5AZXCAAAAAB0QhBMo0inJHjy+OEBVgIAAAAA\n+xEE06jTgKCiMaaGAgAAAAgHgmAaZWZE9M/nTJVEEAQAAAAQHgTBNPvapbMkSVGaxQAAAAAICYJg\nmnWsE4wxIggAAAAgJAiCaZaZ2FW+nSAIAAAAICQIgmkWiTAiCAAAACBcCIIeZESMNYIAAAAAQoMg\n6EFGxBSNBV0FAAAAAMQRBD3IMFM0RhIEAAAAEA4EQQ8YEQQAAAAQJgRBDyImxVgjCAAAACAkCIIe\nZGZE1M7UUAAAAAAhQRD0IGJMDQUAAAAQHgRBDzIi7CMIAAAAIDwIgh5kRiLsIwgAAAAgNAiCHkQi\nUpQRQQAAAAAhQRD0IL6PIEEQAAAAQDgQBD2IRIypoQAAAABCgyDoQWbEFI0SBAEAAACEA0HQg4gx\nIggAAAAgPAiCHmREjO0jAAAAAIQGQdCDTNYIAgAAAAgRgqAHkQhdQwEAAACEB0HQA7aPAAAAABAm\nBEEPGBEEAAAAECYEQQ8yCYIAAAAAQoQg6EEGzWIAAAAAhAhB0IOIsX0EAAAAgPAgCHrA9hEAAAAA\nwiRtQdDMfmNmZWa2utOxj5jZGjOLmdncQzz3MjPbYGabzey2dNXoSyRiao8SBAEAAACEQzpHBO+T\ndFm3Y6slXSfp5Z6eZGYZkv5b0uWSZku6wcxmp6lGLzLMFGNEEAAAAEBIpC0IOudelrSv27F1zrkN\nh3nqPEmbnXNbnXOtkh6QdE2ayvQig66hAAAAAEIkjGsEx0va1el+ceLYQZnZIjNbZmbLysvL017c\n0SAIAgAAAAiTMAbBI+Kcu9c5N9c5N7eoqCjocg6K7SMAAAAAhEkYg2CJpImd7k9IHOuz4ttHBF0F\nAAAAAMSFMQi+LWmGmU01s2xJ10t6IuCajkkmU0MBAAAAhEg6t4+4X9IbkmaZWbGZ3Wxm15pZsaQF\nkhab2dOJc8eZ2ZOS5Jxrl/RFSU9LWifpIefcmnTV6UMkYmonCAIAAAAIicx0vbBz7oYeHnr0IOfu\nlnRFp/tPSnoyTaV5lxER20cAAAAACI0wTg3tdzKMqaEAAAAAwoMg6EFGJEIQBAAAABAaBEEPMiIi\nCAIAAAAIDYKgBxH2EQQAAAAQIgRBDzIjphgjggAAAABCgiDoQYaxfQQAAACA8CAIehCJmCQxKggA\nAAAgFAiCHmRYPAiyThAAAABAGBAEPcjISARBRgQBAAAAhABB0IPapnZJUls0FnAlAAAAAEAQ9OLu\nl7ZIkv6xoTzgSgAAAACAIOhVazsjggAAAACCRxD0KCuxVhAAAAAAgkQQ9ODbV8+WJE0rHBJwJQAA\nAABAEPRiYsFgSZITXUMBAAAABI8g6EHE2D4CAAAAQHgQBD2IROJBMMaG8gAAAABCgCDoQYZ1BMGA\nCwEAAAAAEQS9SAwIMjUUAAAAQCgQBD1ITg0lCAIAAAAIAYKgBxkRpoYCAAAACA+CoAfJqaE0iwEA\nAA
AQAgRBDzq2j2BqKAAAAIAwIAh60DE1lGYxAAAAAMKAIOhBckSQqaEAAAAAQoAg6AFBEAAAAECY\nEAQ92D81NOBCAAAAAEAEQS8yEt9lRgQBAAAAhAFB0ANjaigAAACAECEIepBhdA0FAAAAEB4EQQ/Y\nPgIAAABAmBAEPUgMCIqZoQAAAADCgCDoQXJEkCQIAAAAIAQIgh6wRhAAAABAmBAEPYgkRgQdI4IA\nAAAAQoAg6EGEEUEAAAAAIUIQ9CA5NZQcCAAAACAECIIeRBLfZaaGAgAAAAgDgqAHTA0FAAAAECYE\nQQ/YPgIAAABAmBAEPegYEYwxIggAAAAgBAiCHiQGBEUOBAAAABAGBEEPklNDSYIAAAAAQoAg6IGZ\nyUyKsUYQAAAAQAgQBD3JMGNEEAAAAEAoEAQ9iUSMrqEAAAAAQoEg6EmGGV1DAQAAAIRC2oKgmf3G\nzMrMbHWnYwVm9qyZbUp8zu/hudvN7D0zW2lmy9JVo0+ZEVM7QRAAAABACKRzRPA+SZd1O3abpOed\nczMkPZ+435MLnHNznHNz01SfV5EII4IAAAAAwiFtQdA597Kkfd0OXyPpd4nbv5P0wXRdP2wyWSMI\nAAAAICR8rxEc7Zzbk7i9V9LoHs5zkp4zs3fMbJGf0tIrI0LXUAAAAADhkBnUhZ1zzsx6SkbnOOdK\nzGyUpGfNbH1ihPEAiaC4SJImTZqUpmqPXWbE1B4lCAIAAAAInu8RwVIzGytJic9lBzvJOVeS+Fwm\n6VFJ83p6Qefcvc65uc65uUVFRWkoOTUyMhgRBAAAABAOvoPgE5I+mbj9SUmPdz/BzPLMbGjHbUnv\nl7S6+3l9TWYkQtdQAAAAAKGQzu0j7pf0hqRZZlZsZjdLukvSJWa2SdLFifsys3Fm9mTiqaMlvWpm\n70p6S9Ji59ySdNXpC2sEAQAAAIRF2tYIOudu6OGhiw5y7m5JVyRub5V0arrqCkqGmdpjsaDLAAAA\nAADvU0MHLEYEAQAAAIQFQdCTTJrFAAAAAAgJgqAnGRGjWQwAAACAUCAIepLJ1FAAAAAAIUEQ9IQR\nQQAAAABhQRD0JDMSYUQQAAAAQCgQBD1hRBAAAABAWBAEPYlvH8E+ggAAAACCRxD0JCNiao8yIggA\nAAAgeARBTzIjppgjCAIAAAAIHkHQE9YIAgAAAAgLgqAn7CMIAAAAICwIgp5kRCKsEQQAAAAQCgRB\nT7IzTa1RuoYCAAAACB5B0JOImWJMDQUAAAAQAgRBTyJmIgYCAAAACAOCoCdmYvsIAAAAAKFAEPTE\nJJEDAQAAAIQBQdATM5MjCQIAAAAIAYKgJ2aMCAIAAAAIB4KgJyaaxQAAAAAIB4KgJxETU0MBAAAA\nhAJB0JN419CgqwAAAAAAgqA3ZibH5FAAAAAAIUAQ9IRmMQAAAADCgiDoickIggAAAABCgSDoiZmY\nGgoAAAAgFAiCnkRoFgMAAAAgJAiCnsSnhpIEAQAAAASPIOhJxMTEUAAAAAChQBD0xWgWAwAAACAc\nCIKeWOIz00MBAAAABI0g6EnE4lGQHAgAAAAgaARBTxI5UDGSIAAAAICAEQQ9SU4NDbQKAAAAACAI\nehOJMDUUAAAAQDgQBD1jaigAAACAoBEEPelYIwgAAAAAQSMIekLXUAAAAABhQRD0pGNAkKmhAAAA\nAIJGEPSkY2ooMRAAAABA0AiCnnRMDWVEEAAAAEDQCIKekQMBAAAABI0g6EmEuaEAAAAAQoIg6ElH\nDmRqKAAAAICgEQQ96egaSgwEAAAAEDSCoCeRSMc+gkRBAAAAAMFKWxA0s9+YWZmZre50rMDMnjWz\nTYnP+T089zIz22Bmm83stnTV6NP+fQQDLQMAAAAA0joieJ+ky7odu03S8865GZKeT9zvwswyJP23\npMslzZZ0g5nNTmOdfiQWCTomhwIAAAAIWNqCoHPuZUn7uh2+RtLv
Erd/J+mDB3nqPEmbnXNbnXOt\nkh5IPK9Pi7BIEAAAAEBI+F4jONo5tydxe6+k0Qc5Z7ykXZ3uFyeO9Wmmjg3lAy4EAAAAwIAXWLMY\nF++acsyxyMwWmdkyM1tWXl6egsrSY/82giRBAAAAAMHyHQRLzWysJCU+lx3knBJJEzvdn5A4dlDO\nuXudc3Odc3OLiopSWmwqdUwNpWkoAAAAgKD5DoJPSPpk4vYnJT1+kHPeljTDzKaaWbak6xPP69P2\nTw0lCQIAAAAIVjq3j7hf0huSZplZsZndLOkuSZeY2SZJFyfuy8zGmdmTkuSca5f0RUlPS1on6SHn\n3Jp01ekNI4IAAAAAQiIzXS/snLuhh4cuOsi5uyVd0en+k5KeTFNpgYh0bB9BEAQAAAAQsMCaxQw0\n+3ePIAkCAAAACBZB0JNI4jvNiCAAAACAoBEEPaFZDAAAAICwIAh6sn8fQQAAAAAIFkHQE6NZDAAA\nAICQIAh6kmwWQxIEAAAAEDCCoCdMDQUAAAAQFgRBT9hHEAAAAEBYEAQ96ZgaStdQAAAAAEEjCHqS\nnBpKDgQAAAAQMIKgJ8muoawSBAAAABAwgqAn+7uGBloGAAAAABAEfWEfQQAAAABhQRD0JJIYEqRZ\nDAAAAICgEQQ9YR9BAAAAAGFBEPRk/9RQoiAAAACAYBEEPdm/j2CgZQAAAAAAQdCXjhFBJocCAAAA\nCBpB0JMIG8oDAAAACAmCoCeWmBzK1FAAAAAAQSMIepLsGsqQIAAAAICAEQQ9YfsIAAAAAGFBEPRk\n/9RQoiAAAACAYBEEPaFpKAAAAICwIAh60pEDV+yqDrQOAAAAACAIelLb3C5J+tHTGwKuBAAAAMBA\nRxD0JLNjI0EAAAAACFivgqCZHWdmOYnb55vZl8xsRHpL618ciwMBAAAAhERvRwQflhQ1s+mS7pU0\nUdKf01YVAAAAACBtehsEY865dknXSvqZc+5WSWPTVxYAAAAAIF16GwTbzOwGSZ+U9PfEsaz0lAQA\nAAAASKfeBsFPSVog6XvOuW1mNlXSH9JXVv/DPvIAAAAAwiKzNyc559ZK+pIkmVm+pKHOuR+kszAA\nAAAAQHr0tmvoi2Y2zMwKJC2X9Esz+0l6SwMAAAAApENvp4YOd87VSrpO0u+dc2dKujh9ZQEAAAAA\n0qW3QTDTzMZK+iftbxYDAAAAAOiDehsE75T0tKQtzrm3zWyapE3pKwsAAAAAkC69bRbzF0l/6XR/\nq6QPpauo/oiuoQAAAADCorfNYiaY2aNmVpb4eNjMJqS7OAAAAABA6vV2auhvJT0haVzi42+JYwAA\nAACAPqa3QbDIOfdb51x74uM+SUVprAsAAAAAkCa9DYKVZvZxM8tIfHxcUmU6C+tvFs6M5+bzZ5Gf\nAQAAAASrt0Hw04pvHbFX0h5JH5Z0U5pq6peyMyMaNTRHY4YNCroUAAAAAANcr4Kgc26Hc+4Dzrki\n59wo59wHRdfQI2ZG91AAAAAAwevtiODB/GvKqhggTCYnkiAAAACAYB1LELSUVTFAGN8xAAAAACFw\nLEGQoa2jwNRQAAAAAEE7ZBA0szozqz3IR53i+wkeFTP7spmtNrM1ZvaVgzx+vpnVmNnKxMcdR3ut\nMDGRngEAAAAEL/NQDzrnhqb6gmZ2kqTPSJonqVXSEjP7u3Nuc7dTX3HOXZXq6wfJzBgRBAAAABC4\nY5kaerROkLTUOdfonGuX9JKk6wKoIxA0iwEAAAAQtCCC4GpJ55rZSDMbLOkKSRMPct5ZZrbKzJ4y\nsxP9lpgextxQAAAAACFwyKmh6eCcW2dmP5D0jKQGSSslRbudtlzSJOdcvZldIekxSTMO9npmtkjS\nIkmaNGlS2upOBTNyIAAAAIDgBTEiKOfcr51zZzjnFkqqkrSx2+O1zrn6xO0nJWWZWWEPr3Wvc26u\nc25uUVFR2ms/FsaOGwAAAABC
IJAgaGajEp8nKb4+8M/dHh9jFt91z8zmKV5npe8608HRLQYAAABA\nwLxPDU142MxGSmqTdItzrtrMPidJzrm7JX1Y0ufNrF1Sk6TrXT9IUEwNBQAAABAGgQRB59y5Bzl2\nd6fbP5f0c69FeWBiQ3kAAAAAwQtkauhAZWaMCAIAAAAIHEHQo/iIIFEQAAAAQLAIgj6xRhAAAABA\nCBAEPWLzCAAAAABhQBD0jSFBAAAAAAEjCHoUbxZDEgQAAAAQLIKgR2wfAQAAACAMCIIemREEAQAA\nAASPIOiRiamhAAAAAIJHEPSIEUEAAAAAYUAQBAAAAIABhiDoGQOCAAAAAIJGEPTIzJgaCgAAACBw\nBEGPTBJjggAAAACCRhD0iGYxAAAAAMKAIOiRGeOBAAAAAIJHEPTIZHIMCQIAAAAIGEHQI7OgKwAA\nAAAAgqB3jAcCAAAACBpB0CMTzWIAAAAABI8g6JMZI4IAAAAAAkcQ9Cg+IkgUBAAAABAsgqBHNIsB\nAAAAEAYEQY9YIwgAAAAgDAiCHhlDggAAAABCgCDomaNdDAAAAICAEQQ9YmooAAAAgDAgCHpkRhAE\nAAAAEDyCoEcmY2ooAAAAgMARBH1iRBAAAABACBAEPTKJ8UAAAAAAgSMIesTuEQAAAADCgCDoG0OC\nAAAAAAJGEPSIZjEAAAAAwoAg6BHbRwAAAAAIA4KgR2bMDAUAAAAQPIKgRyaTY0gQAAAAQMAIgh7R\nNRQAAABAGBAEPWM8EAAAAEDQCIKeMTMUAAAAQNAIgh6ZGSOCAAAAAAJHEPTIJIYEAQAAAASOIOgR\n20cAAAAACAOCoEcmBgQBAAAABI8g6JGxfwQAAACAECAIeuaYHAoAAAAgYARBj5gaCgAAACAMCIIe\nmREEAQAAAAQvkCBoZl82s9VmtsbMvnKQx83Mfmpmm81slZmdHkSdqcc+ggAAAACC5z0ImtlJkj4j\naZ6kUyVdZWbTu512uaQZiY9Fkn7htcg0iY8IEgUBAAAABCuIEcETJC11zjU659olvSTpum7nXCPp\n9y7uTUkjzGys70JTjZ6hAAAAAMIgiCC4WtK5ZjbSzAZLukLSxG7njJe0q9P94sSxA5jZIjNbZmbL\nysvL01JwqrB7BAAAAIAw8B4EnXPrJP1A0jOSlkhaKSl6DK93r3NurnNublFRUYqqTB9mhgIAAAAI\nWiDNYpxzv3bOneGcWyipStLGbqeUqOso4YTEsT7NZOwjCAAAACBwQXUNHZX4PEnx9YF/7nbKE5I+\nkegeOl9SjXNuj+cyU47tIwAAAACEQWZA133YzEZKapN0i3Ou2sw+J0nOubslPan42sHNkholfSqg\nOlPKTIwHAgAAAAhcIEHQOXfuQY7d3em2k3SL16I8MBnbRwAAAAAIXCBTQwcsRgQBAAAAhABB0CN2\njwAAAAAQBgRB3xgSBAAAABAwgqBHZkYOBAAAABA4gqBHJtEsBgAAAEDgCIIesX0EAAAAgDAgCHoU\nHxEMugoAAAAAAx1B0KP4GkGSIAAAAIBgEQQ9YvsIAAAAAGFAEPSMqaEAAAAAgkYQ9MkIggAAAACC\nRxD0yJgcCgAAACAECIIembGPIAAAAIDgEQQ9MrGPIAAAAIDgEQQ9MtYIAgAAAAiBzKALGEheWF+u\nivqWoMsAAAAAMMAxIugRIRAAAABAGBAEAQAAAGCAIQgCAAAAwABDEPToY2dOCroEAAAAACAI+jQo\nM0NDc+jPAwAAACBYBEGPzKQY+0cAAAAACBhB0KOIsaE8AAAAgOARBD0yM0YEAQAAAASOIOiRmUQO\nBAAAABA0gqBHJiMIAgAAAAgcQdCj+BpBkiAAAACAYBEEPYp3DQ26CgAAAAADHUHQo/jUUJIgAAAA\ngGARBD1i+wgAAAAAYUAQ9MloFgMAAAAgeARBjyIW/8z0UAAAAABBIgh6ZIonQRrGAAAAAAgSQd
Aj\nRgQBAAAAhAFB0CNLBEFGBAEAAAAEiSDokSWSIJvKAwAAAAgSQdAjS04NDbYOAAAAAAMbQdCjjmYx\nBEEAAAAAQSIIehRJrhEkCQIAAAAIDkHQo+TU0GDLAAAAADDAEQQ9inQ0i2FEEAAAAECACIIBYPsI\nAAAA9Cft0Zi+9fhqldY2B10Keokg6FGEuaEAAADoh17ZXKHfvbFDX3/kvaBLQS8RBD0ymsUAAACg\nP0q8vY0y9a3PIAh6lFwjGHAdAAAAQEox8a3PIQh6xIggAAAA+qPE21yaIvYhBEGPzNhQHgAAAP1P\nx/tc9B2BBEEz+6qZrTGz1WZ2v5kN6vb4+WZWY2YrEx93BFFnqvGbEgAAAByrFzeUaXNZXdBldLH/\nfW6gZRzSn5fu1M9f2BR0GaGR6fuCZjZe0pckzXbONZnZQ5Kul3Rft1Nfcc5d5bu+dKJpKAAAAI7V\nTb99W5K0/a4rA65kv/3vc8P7Tvcbj8Y7mn7xwhkBVxIOQU0NzZSUa2aZkgZL2h1QHV5FmBoKAACA\nfsjE+9y+xnsQdM6VSPqxpJ2S9kiqcc49c5BTzzKzVWb2lJmd6LXINIkkflMS5V8IAABAj5rbovrQ\nL17XquLqoEtBL7FEsO/xHgTNLF/SNZKmShonKc/MPt7ttOWSJjnnTpH0M0mPHeL1FpnZMjNbVl5e\nnq6yU6JjRDDG/ioAAAA9WrO7Vu/sqNK3nlgTdCk4Qox39B1BTA29WNI251y5c65N0iOSzup8gnOu\n1jlXn7j9pKQsMys82Is55+51zs11zs0tKipKd+3HhKmhAAAAfcvqkhp96f4VbJR+GGyT1vcEEQR3\nSppvZoMt3mf2IknrOp9gZmMSj8nM5ileZ6X3SlOMfyAAAAB9yxf+tFxPvLtbxVWNQZcSbom3t0u3\n7Qu2DvSa966hzrmlZvZXxad/tktaIeleM/tc4vG7JX1Y0ufNrF1Sk6TrXT/YcyE5IhhwHQAAAOid\nuua2oEvoExgw7Xu8B0FJcs59S9K3uh2+u9PjP5f0c69FecCIIAAAQN9S1RgPgtWNbZo8MuBigBQK\navuIAclYIwgAANAnLd3W51cpAV0QBD3q6KrbD2a5AgDQb/zrgyv1/afWHf7Efq62uU1vbw/X+q6q\nhtagS0gysT/CofSl7SPaorGgSwgFgqBHrBEEACB8HllRonte2hp0GYH77O/f0UfufkP1Le1Bl5L8\npfn2yvA0aOlLQScIfWmco6ktGnQJoUAQ9Ig1ggAAIKzW7a2VJLW1Bz9aQuORvoeg3PcQBD2KJP6B\nkAMBAEDHzLNMAAAgAElEQVRPmgMareiYuRSGX1izjKbv6Us5kL9ecQRBjyxEP2ABAED4vLC+VMff\nvkQrdlZ5v3YkOXPJ+6UPEIISDsDbt0Mrr28JugQcIYKgR/ubxQRaBgAAofL65gpV8CZSkvTyxgpJ\n0oqd1QFcvaO7efBvVEJQwgE2lNYFXUKovb6573RVreTnjaSA9hEcqCJsHwEAwAE+9qulmlqYp398\n7fygSwmNIN4qJJewBHDt7lwoquhqVXEQ4bzvKBqaE3QJvfbG1kpNKxpyxM9raY9q7e5a7WtoVVs0\npjMmF/Spr7s7gqBHNIsBAITRe8U1uv3x1Xpg0XwNysoIpIZtFQ2BXDdsgmy4EaY1giHMgTiM6qbw\nbPVxOEcz/bktGtMV/+8VbSnf/7Pqvk+9T+fPGpXCyvwiCHrE9hEAgEMpqW7S+BG53q/7nb+t0cpd\n1XqvpEbvm1Lg/foIB9YIHprvfOycUzTmVNvcrnV7amWS1u6p1XcXp3fPy2jMqS0aU1ZGRBGL/32I\nOaf2qJOTU1NrVDVNbWpoiaqivkWltc3aVdWoP765M611ddfSHtWufY1qbotpSE6mhuVmaXB2hnIy\nI3JOikRMzrlkj472Y9g7sD0a0x/f3KEt5Q269dJZOnt6ob
IzIpo0cnCqvpxAEAR9YkQQANCD59eV\n6ubfLdMvPzFXl8weHUgNsTAkAARmd02zpHD8PejPb5Vqm9v0xzd36PXNlXp3V7XqWtplFv+ah+Zk\nKuacGlr9do6ta27Tj5/eoKXb9mljaV0ofhnQE+ec/uX+FVqyeq/aD1FoZsR6fPy1TRW6cf7kHp/b\n2NquDXvr9M6OKr22uUJLt+1TY2tUY4cP0s3nTA1s5kSqEQQ9Yo0gAKAn75XUxD8XV3sPgst2xDtU\nvrq5QmdOG+n12mFoTBJGQW0hIcVHhIK2raI+sGtHY0679jVqc1m9NpXtryNV35WfPrdJv3p1m2aP\nHaYLTxiliJkKh2SrtT2WHL16bXOFLj1xjLIyIsrLyVBxVZMm5OfKzJRh0rf/tjZF1cT9/o0d+t0b\nO3TezCKdP2uUBmdnqD0arydipoxIvPt9XXO7huRkqGhojtqiTuPzczUxP1f5g7P1v/66Ss+vL0tp\nXQfz+pZK/X3VHl196jhdeHyRIom62qMxNbZFVVzVpMyIySStKqnRyLwc7Wto0djhuVr83h5J0pI1\ne5Ov55zTmt21emljud7dVa0NpXXaua8x+X598sjB+vAZE3Tm1JFaOLOw34RAiSDo1f6uocH/gAUA\nhIsp+OUDO/c1Bnh1SNK+hvg6qx89vUG3XDA9kBrCMHNpa5rXjDa0tGtXVaN27WvSrn2NXW5vq2xQ\na/uB0wiP9f2bc06rS2r18PJiLZg2Uvcvmn/Ur5WKINgejWnN7lq9vLFc972+XTNHD9HvPj3vqF/v\n2tPHpyUIRmNOq0tq9OzaUj2/vkzr9tQqNytD37p6tgqHHFmjlsW3LU7e/unzm7SprF5Lt1aqrC7e\nRXRqYZ5OHDdM1502QbPGDNFpk/I1etiglH49YUIQDEBbNPgfsACAcOloEhLke/AgBoJCkDlCpaEl\nuJHADst2VB1VR8Wj4Vx8DVx5XYv21DTpr+8Uqz3qkiM3krRk9V5ddtKYI37d8roWbato0I59jSqu\nalJJVZO2VtRrZ2WjKhu6NjYZnJ2hifmDNbEgV+fNKtL0oiGaPnqIjisaolO/84wkdWkSciR1bCyt\n1xPvluip9/Zqa0WDsjJMHz5jwhG/1rFyzmnXvia9uLFMb2yp1JtbK1XV2CZJOnn8cH3jihOO6fUj\nKex0tLu6Sa9urtBrmyv06qaK5J/XmVMLdOuls3TpiWOOOAR295NnN2r8iFzNm1qg82YW6YLjRx3z\na/Y1BEGPHlq2S5L0m9e2acFxfqfeAADCLcBmkUmrE9NTfSIHdhVE11DnnFo6jYCV1Tan7LUbW+Mh\nb1NpvSobWrS3pkWldc3aUdmg0toW7a5uUuNh1sN97o/v6KHPLlBWRqLpR8wpYqa2aEw1TW2qrG9V\nZX2LyupaVFLdpJ37GlVS1aSmTtNrzaTCITk6rihP7z9xjCYW5CaC32BNzM9VQV52clrmsYrFnFYW\nV+u5taVasmavtpY3KCNimjelQJ9ZOE2XnzRGIwZnp+RaW8rrddwhQrtzTquKa/TM2r168r29ye68\n40fk6sLjR+u8WUVaMG1koFsgxGJOWyvq9da2Kq3cVaW3tu3T9sr47ITCITk6d0ahzp81SmdPL0xp\nnc/963maPsrPLzzCiiDoUWaENYIAgEMLcv80tnAIXiTNQTAWc9pe2aD1e+v0VqIxyO7qpuQbb6nn\n9ykl1U3aU92k+pZ2tUedWqMxNbVG1dQWVWNru6oa21RZ36LK+laVVDdpb22zqhMjTp0V5GVrYsFg\nTSvM08IZRRo7fJBGDcvRqKGDlJMVUWFeju5/e6d+8eKW5HP+6Z43Dvu1Dc/N0rgRuTquKE/nzSzS\nxPxcTSnM05SReRqfn6usjMiRf8N6KRZzWrGrWotX7dGT7+3R3tpmZURM86cV6FNnT9VlJ45JS9h6\nd1f1AUGwpT2qN7fu0/
PrSvXc2lLtronXcubUAt101hQtnFmkKSMHpyz4dujtq8ViThvL6vTmlkq9\nuXWflm7bPzKZPzhLcyaO0I0Lpujs6SM1a/TQlNfZYaCHQIkg6NWNCybrsZW7dcO8iUGXAgAImY7R\ni3V76gKuxC/WzXdlKRwb7piWuKq4Wmt212rN7hqt3V17QEfKUycM18KZRXp5Y7kkHTBraf3eWt3+\n2Gq9vb3qkNfLiJgGZ2do/IhcTcgfrNMmjVBBXrYmF+SpaGiOphXlafSwQb1qtjGm07qsC2YV6VNn\nT1U08XclM2KKxpyyMyIalpulgrxsjRySrZxM/008NpfV68G3d2rxqj3aXdOs7IyIzptVpNtOPl7n\nzypK2chfT+pb2iXF1zz+Y0OZnlq9Vy+uL1NDa1SDsiJaOKNIX7lkpi6dPUbDB2eltZaexGJOm8rq\n9ebW+HTUpdv2JdfCTsjP1UUnjNa8qQU6Y3K+phXmpS344UAEQY8yI/HfRPH3GwDQ3dvb90mSXvDQ\ndS9dOn7T/8Bbu1Tb1KYTxw/XG1sqtXxnlaYV5un4sUOVGYnovte36+ITRmn0sEGqrO87m1D70PH3\n4Eg557S5rF6rE2FvVXGN1u+tU01TfKRlcHaGThg7TB8+Y4JOHDdcM8cM1ci8bI0fkatIYhhySqKR\nxuqSGs3ttJ/kF/60XOW1Lbrt8uM1a8xQDRuUpawMU2YkoiE5mRqUFVFOVoaG5GQqI0VDmp1H7+65\nca6yM9M3mnc0lu+s0i9e3KJn15YqK8O0cEaRvnbpLF08e7SGDfIXuO54fI3ue327dlY2qj3mVDgk\nWx+YM16XzB6ls44LrsPllNsW685rTkyEv/3Bb/yIXF0wa5QWHDdSZ04t0MSCYPbh+8k/nRrIdcOG\nIOhRxyLaY9jPEgCAlDqaTZadc9pT06x7Xtqiprao6prbtXp3jcrrWtTctv/1HllRkry9r6E1uU2F\nJD23LpjA65xTSXWTXt9Sqdc2VyRHwXxpbotqd3WTSqqbtKm0Xq9urtDQQZnata9Ry3dW9/p16prb\n9O6uGr1XUqNl2/dp2Y6qZOiTpDkTR+j9s0frtEn5mje1QFML83od0h5buVs3nT1VklRR36KtiU20\nP3fecUf2xR6DMybnJ2+HKQS+tLFc//OPzVq6bZ+G52bpSxfN0CcWTA60yciE/MG6/KQxWjijSHOn\nFKQsjB+pi07ouu3NHY+vSQa/+dMKNH/ayMCCX3fdax2oCIIeJQYEQ9GWGQAQLqmcEngosZjTs+tK\n9dk/vNPr5zjntKG0Tv9YX653duzT61squzT4mDxysE4eP1xjh8fXZLW2x3TezCIVDslWXk6mapva\nFHNSTlZETa1RtbTFtKG0Tu3RmJ5es1ePrdx92Bpqmtq0fk+ttpQ3qKyuWTVNbXJOao3GlGGWXFvp\nnLRhb51GDM7Wip1VKhqao4r6FlV4GHmMxZwq6lu0p6ZZ+xpaVVzdpOKqeOOSkuomFVc1qTzRpr6z\nQVkRDc/N0oxRQ7rsW9dZTVObXtxQpje37tOKnVXaUFqXXMs3rTBPl504RqdOHKFTJw7XrNFDlXkU\n6+HOnVGoVzZVqK45HihX7KzS1/7yrjIi8REvn/JywrlX2yd/85bGDh+k26+arevfN1F5OcG/lf79\nMWz5kErdA/vLt16giQW5oZzqOTw3mGmyYRP8394BZP+G8gRBAIA/zW1R/f6N7frfT67v9XO2lNfr\nxQ3lWlNSo2U7qpJ7DI4fkauzpxfqzKkFOmn8cJ06YYRysw/9pn1kp9GSjmlzk0bGRwYuP3nsQYNg\nbXOb1u6u1RtbKvXU6j3aWNo1IGVnRpSTEVFOVkTtMaeMTm82W9vjG0tHY/GGJh3XHJQVUXNbTOfO\nKNSVJ4/VtKIhet+UfE39+pOHrN85p5qmNlXUt6qstll7apq1t7ZZ5XUtKqtrVlltvGPl
3ppmtXYb\nYc3KMI0fkavx+bm6cNYojc/P1YT83Pg6uoLBGj00p0tou+up9br7pXiTlJ2VjXpm7V49s7ZU7+yo\nUjTmNDQnU3MmjdClJ47RGZPzNXvcsJSPRm0pb9Af3tyhO/+2RoVDcvTHm8/UyROGp/QahzMhPxwj\nRwfz0q0XhGaUcuFMvwG9t1782vnJf+MIL4KgR0wNBQD40tDSrje3Vur/Pb9Jq4q7bgtxzvRCffWS\nGTp9Ur7MLLk2TJI2ltbpe4vX6aXElMmRedk6ZcJwfebcqVpw3EhNHzU0bTX/z4ubFY06/W3Vbm0q\nq5dz8S6ap0/K162XztLsccN0XOEQjRqWo5zMSNpHGnZXN+lnL2zWoyuKu0x57TA0JzPZ7fK0SSM0\nZvggTRiRqzHDc1WQl6UJ+YNVNCQnuQavN86cWpAMggt/9A9J0vFjhupz503TRSeM1qkTRqRt6l9t\np6mltz+2WhceP0r/9U9zAmsyEhajhuYkNxzffteVAVfT1R1XHdvef+mSn5feJjlH69mvLkw22AFB\n0KsMpoYCAHqSgvf2q4qr9cL6Mr22uUIrdlarPeZkJi2YNlJzp+Tr5nOmHrSL4YdOn6CHlxdLkq76\n6asanJOhWy+dpWvmjPM6MvPDJRskxcPQVy6aqZPGD9Npk/JVEMCbyhU7q/SxXy5VNOZ07WnjNXPM\nUBUOyVbR0ByNGTZIY4YP0uDs1L+NGpbb9TVfvvUCbyMrdd3eIN9z4xlp3XKhr7j5nKn6/lO9H033\nqaMRYdiEderljNHp+0VWX0QQ9KjjN5dhDYLNbVEdf/uS5P3f3vQ+nTuj8KjWGQBAX1Xb3Kbm1qhG\ndWpf78PE/MF6a9uRd4ysaWrTQ2/v0veeXJc8VpCXrZvPmaqFM4s0d0r+YdvqT+4UNCYW5OrBzy4I\nrPmFz+BzKI8sL1HEpGe/dp7XMBzpNMr57rfe7/UNdXt0//uTR79wVuAh8BtXHK/500Ye/sQ0i4b0\nfZukwBrDoH8gCHoUCXkQ7BwCJelT972dvB22qRAAkC4X/OhFVTa0ev+5N2fSCD28vLjXe83urWnW\n/7y4WQ8t25Wctvjli2boA3PGHbDB9OF0Xu/0gw+dElgI/M8PnhR4CHTO6Yl3d+uxlSWaN7XA+1q1\nzkHQ96hKbqetBk6blH+IM/1YtNBfl9JD6RyQw2JwdoYaW6OakJ8bdCnowwiCHnUsZI+FcI3got8v\nO+TjU25brG9ccbz+tHSnXrr1Ak9VAYB/lQ1+97VrbG3Xr17Zpp88u1GS9PjK3fr+daf0eH5ZXbN+\n8eIW/WnpTsUS0xZvXDBZJ48fftRr5jr/frLz/nG+nTO9MLBrS9KufY364dMb9Ld3d2v22GH62qWz\nvNcwZWSe92t2+Ox50/SvD70b2PXD6vKTxugnz27Uf300PHvPvfXNi1XV0BrKjpzoOwiCHnX8Ww3b\niOCy7fv0zNrSw553JN3mAAA921PTpM/+4R2t31N3QJfJztsydFbT2KbbH1+tZ9buVVvU6cOnT9AX\nL5yekn25rn/fRP1gSXA/46cV5WlreYOGDQr2bck3Hn1Pr2yq0K2XztLnzzvuiJq8pEqQjVnSseax\nP5gxemjoZkYNycnUkBBsXYG+jb9BHnX8hxKmIOic04fvfuOA47deOks/enrDQZ9T09TWq+kqZXXN\nci5+/lPv7dWXL55xzPUCQF+1tbxeH/vlUu2tbe5yfHhulmaNGapzphcmRwU7a2mP6g9v7NDPXtis\nmqY2XTNnnL568UxNKUzdyFF+XrZeu+3C5P5xvj39lYXaXd3UZZuJILyyqUIfOWOCbrlgeqB1BOXi\nE0bpEwsm60sX8f81jt5nzp0adAnoJYKgR8mpoSHJgc65A/ZOOm9mkX570/sUiZhuuWC6vv/kOt3z\n8tYu53zxz8v1h5vPPOzrz/ve813uf/qcKRo6qOcA
Wd3Yqtb2mPcGDQCQLjWNbXpkRbH+sqxYa/fU\nJo9ffeo4XXPqOF14/KjkLwl3Vzd1CYIt7VF9b/E6Pb1mr0prW3T29JH66sUz0zZ1c/yIXEnBrDfK\nyohocoBTIjv79DnBv4kNavQpMyOiO685KZBro38I28gpDo0g6FHHDJNoSJJg9xB4yezR+uUn5nY5\n9vUrTtAXL5yuk7/9TPLYK5sqjup6L2+s0JWnjO3x8TO++5yiMZfWHyKnfPtpnTE5X7/91Ly0XQPA\nwNYejenBZbv0q1e2aVtFgyRp2KBMfenC6fr4/Mk9/rKr82yRlzaW67aHV2lPTbPmTByhuz50is6f\nWcR6IA+OH0N7eQADA0HQo47/wF0IpoZ+5YEVBxzrHgI7DB2UpQcWzdf19755wGM1TW069Tv7Q+KE\n/Fw99eVzDzryd8ufl+uWP0tDB2XqqlPG6ZYLjuvSjc1HQK5tbtc/NpSn/ToABp6fPLNBz6wt1fq9\ndclj1502Xp8+Z6pOGj/8sM/v3Ejsk795S8ePGaoffOgUnTujkADoEd9rAAMFQdCjjEg4poY2t0X1\n2MrdXY4dbhSu+z4+U25bfNDziquauoweHkxdc7vuf2un7n9rp175XxekpNEBgP5ldUmNrvrZq3r0\nC2eFoo39oby+uUL/97lNemv7/j0A77hqtq47ffxBN2/vSfe9yh5ctCDQxiEDycfnT9If39ypTd+7\nPOhSAMAbgqBHYZka2n2/wCD/4zv3h//QyjsuOaI3S9srGtQajWnmaKbvAP3VP9aXSZKeW1ca2iC4\ndnetvvLgCm0srVfhkGxddPwofffakzR2+NGts+v8f8OciSMIgR7d+YGT9I0rTgh8A3UA8Ikg6FEY\nuob+8c0dXe6/+fWLev0f3/a7rlR1Y6tiTnplU7m+/MDKXl93+qgh+vu/nHNACJWkOXc+q7V3Xtrr\n1zr/xy8m6wHQP3X8vIyGcN/VlvaofvLMxmQjrX+5cLpuuWC6BnXajPtodP6/4bc3ve+YXgtHJhIx\ntk4AMODwqy+PIsk1gsFcPxZz+o/HVnc5Nmb4kXXoHDE4WwV52bpmznhtv+tKPfz5BZKkT5996C5r\nz/3reRqUlaHtd12pj8+fdMDjs+94+ojqANC/dUylD8Oa6s7e3FqpK3/6ajIE3v+Z+fq398865hAo\ndR0RHMFoIAAgzfj1l0fJqaEBvbGZ9o2uXUJTMaJ2xuSC5OvcftUJyeMfufsNLdtRJUl6cNH8Ls/5\n7gdP1k1nTdHFP3n5mK+PYLVHY8qIGM0VkHJhmUrfoaU9qh8u2aBfv7pNkwoG6zc3zdWFx49O6TXy\nOo1I8W8KAJBujAh6FLHgp4Z2ePXfL0j5a5pZ8uOvnz8refzMbo1mJGn6qKFM7ezjKutbNP2bT+nX\nr24LuhT0Qx0/L9s9BsHmtqj+8+9rk/c7tn5YuataF/74Jf361W26cf5kLfnKuSkPgZI0aeRg/fkz\nZ2rdnZel/LUBAOiOEUGPgpwa2r3LZ+dtG9LlP685UauKaw55zva7ruyxAynCbU9NsyTpkeUl+udz\npwVcTWrc9vAqPfD2Ln5JEQJbyuslSfe9vl3f/sCJabmGc07Ld1br64+s0sbS+gMeX7q1UtWNrfro\nvW8qOyOiX35iri6ZnfoA2NlZxxWm9fUBAOhAEPQoqKlOt3dbF7j89ku8XPfGBVN6dV5fDoOvbCpX\nQ0tUl500JuhSvOv4hcbaPbVpv9YL60t12sR85ef1vrvs0Xjg7V1pfX30XlltS0pfzzmnjaX1+tPS\nHXpnR5WqG9tUUt3U5Zyxwwdp0cJp+s7f4qOC2yob9IMl6zVm2CA9sGi+xo04um6gAACEEUHQo47m\nByt3VXu7Zizm9IdOnUKvPW28CtL8ZvpodA6De2uaj7iJTVBu/PVbklLfwfTBt3dqSE6WrjxlbEpf\nty+qbW7Tp+9b
ptMnjdAjXzg76HIGtJ+/sEmTR+bp6lPHpf1aUwrzjvq57dGYHlleosqGVpXWNmvd\nnlot3bavx/O/dfVs3Th/sjITHZQ7guA9L23ViMFZ+t2n5xECAQD9DkHQo47F/y8k9sfyoXuDmP/6\n6Bxv1z5ad7+0JW1TwfqKf3/4PUnSlaeEd4qik5+R7fZo/DpbE+u1EJwfP7NRkrwEwRPGDpMkXXzC\nqMOeG4057dzXqKVbK/XihnItWbM3+VhOZkRTC/M0JCdTH5gzTs5JV5w8RgumjUwGv0P5+JmTNfUY\nQikAAGFFEAxAx8hgh1jMJffMSqdvXz077ddIhfte3677Xt/e5VjHpvOPrywJpigEpuNfRgh6LCEA\nz63b/4uzhpZ2batoUHFVk4qrGrWptF6rSmq0rtP05OzMiC49cbTGDs/V1aeO05yJIw74mXskrjiZ\nUXkAQP9EEAzA/GkFyduNre3JPfRSPb1w7nef63L/psPs9Rdmc+589oBjbdGYsnrxG30pPqWtYzQD\nqeErmO1vskQSHCia26L62l/e7XL/jsdX67EVu9XaaYf5wdkZmpg/WBefMEpzpxRo4YwizRoz9JiC\nX3ezxw1L2WsBABAmBMEAvLa5UpvL6rSnpjm5xizV2qMxVdTvb7bw9jcvTst10mXNdy5VXk6m9tQ0\nacH3XzjoOT9+ZoO+fvkJB31Mkppao3pkRbG++ejqHs8ZKJxz+p8Xt+gjZ0zQqGGpWX/pbRuUxHt6\ncmD/1dwW1b/95V0tXrXnoI//cMkGPbSsWDfMm6hzphcpPy9LxxUNUdGQHC+zKQAA6I8IggE52Gbq\np935jF799wuVl3PsfyzTv/lUl/tFQ3OO+TV9eeizC5Lfg7HDc5Mjpd07i97z0tYDgmBre0x/eWcX\n4a+b4qom/ejpDXpmzV49/sVzUvKaDS3RlLzOYSUCYF1Lu5/rwYuqhlb93+c2akNpnd7ZUaW2aM9J\n/77Xt+mGeRP1/etO8VghAAD9G0EwRKoa23Tit55O+RTRzd+7PKWvl27vm5J/0OM9bTPhnNOflu7U\nk+/t0etbKrs89t0PnqSrTx2nnMyIPvjfr2n93jpJ0pLVe3TZSQNn7U9idqUq6ltT9pq+RgTTeZ3G\n1nbtrm7SK5squvzdOZJpx+idtmhMD769S//RbTubaYV5uumsKWppj+nz5x+nscPj3Tn31jRr/vef\nlyTFnPTFC2d4q/WaOeP0+Mrd3q4HAEAQCIIhNOW2xccUBruHpd50xguTju6qB9M9DH7/qXVasnqv\ndlQ2djnv0S+cpdMmdQ2US76yMPncd3ZUDbAgGP+eHssels45tcecyupaVF7Xop+/sDlV5SVtLK1T\nWW2LSqob9d3F61TX3LtRwI2ldfrVK1v10LJiDc7OUGNrfLRyaE5ml5HEs6eP1GubK3t6maQfPLVe\n/3FV32iuJEk3/nqpXtlU8f/bu/P4qMp7j+OfX0IgCRBCCBAkCWGJICBb2UUKghiw1+W2RbjaWttb\nrLWt9V6qKFYtFe12rbVyW6222tpbW7Vea4soIFqRHQER2cIqIRC2EELI/vSPOQmTBUjITCbJfN+v\n13nNOc85c+aZ5Je85jfPFvAvkRridFEpG/bnsmbvcVbvPlZj+Ya0TrHc/tnezBiRUuvffPVZabs3\n4vINUwcm8frGg8y/cWCjvaaIiEhjC0kiaGZ3A/+Jr9PXZuA251yh33kDfgFMAwqArzjnPgxFXRvD\nijlXMfZHVcfBNTQZrLBm7qQG36Ox7H50GmX1bP15+r3dAIzrk8iTM4fWeY3E37y/h7nXNp8P+g1V\n6k2wcSiv8AJXnlVQXMqjC7fy4qr9AatHcWk5m7NyeXPzIZ5dvodRPRNYvec4EeZr9amrkrJy1u45\nztq9J1i67TAfHTjpV+8yoiKNK9M706V9G7Zm57HJO59bUFJ53Yi0jmTm5HPCr6
zCs8v3NKtE8P2d\nR0PyuuXlvnTtVGEJe46eZsWuY2Tm5LM88yhHTvnGKEeYbymIm4an0P+SOPoltWdkz4TzfuEDVceE\nrrzvqiC+i5quGZDEy98Yw/AetfdOEBERaQkaPRE0s+7Ad4D+zrkzZvYXYAbwvN9lU4F0bxsF/Mp7\nbHFe++bYcy5U3P/BRXwyL6Ne9ysuLa9y3KV981iYHSAiwoigfhM/JLZrw5L/Gk98bN0SwHBzpriM\n7YdPccOCD857XU5eIRs/zWXFrmNknzzDW1sOExlhDWpBzCssYenWw9z9503nvKYiCbyiT2KVZOYr\nY9PYffQ0URFGflFpZWvSE0t28N6OI2zYnwv4urxe3r0D92T0ZVyfRPolxdG61cW3gPu3NpeVu4DO\nPtmSLNuWw+yXN3HsdM2uxhHmm2nzc4O6MaFvF4alxtM+Oqrer+EfeRXdRRuLmTEiLeHCF4qIiDRj\noeoa2gqIMbMSIBaoPhjjeuD3zjdf/Cozizezbs652qeUa0Y2PTSFwT94u/K4evfFOyf2ZsGyXYCv\nZeJ45OkAABDhSURBVCO3oLheSc6lD5ydJGbO1H4NrG3Tt+6Bi58NtaC4lNjWoe8d3ZDxaMfyi1i/\n7wQHc8/wyocHGJwcz/9vyOJ08bknckmb8w/iolvRN6k9a/eeOOd1I9I6MqlfV64fekmtXyg8+/5u\nHvnH1sp7zhiRQkpCLEdOFfFx1kk2Z52kyPtionVkBMVl5Uy+rCt9k9rROjKSjIFJ9E1qX6f3WZGg\nPbFkJwAZA5JIS2zLrPG96twKXF83P7uK6cNTSE2IZcP+XL48tgdtWkU26J6ZOflMf3olC79zJUkd\nAv8ljXPugi1t9b3fsdPFzH55E+9uP1JZftvzawGYOTKFpLgYoqMi6NGpLQltW/OZHh0DkkBHeu8j\nvUu7Bt9LREREamr0T8HOuSwz+xmwHzgDvO2ce7vaZd2BT/2OD3hlzT4R7BATxbLZE3hhxV5mX9O3\nsnzrvAx25pxiUHJ8ZSIIvvXz6tJFNDMnn8mPv1el7OtX9gpcxZuQkT0TWLPnOA9ce+6lI+ri+RV7\n+eaEPhf13JMFJQyedzZszxSXEdO67klCzqlCRs5fWu/XzSss4fUNWew+epr1+05U6RIJ8MnBPDrE\nRFFS7iguLadnYlv2HD1dy31KOVNSRvf4GE6eKSG/qJS7JqXT/5I4xqd3rtN7GdO7U5Xjl9b6/mRj\nW0fSv1sct4zuwZT+XRnWo2NAJ17Z/khGgxOyuli1+zirdp8d1zZ/4VYS27Vm9f2TLzrReXHVPo6f\nLmbh5my+Oi7w63qWO4i8yBysoLiUD/fl8tibW9lyMO+C13/4/auDloQDJHWIZt71A5jSPyloryEi\nIhLOQtE1tCO+Fr+eQC7wspnd4px78SLvNwuYBZCamhqwegZTz8S2PHzdgCplMa0jGZQcD8CuR6fR\n+/6Fdb7f9kOnuOaJmstRtNRubf/zxcG8uHofX72i/h+kl987kWufXE65czz1TiafH5ZMV29dvTPF\nZazbd5wvPbemyoQjAFemJ9K5fRtaRRgHcwtZnll1TNZlDy6qkrAXlpTx5sfZbNify/ZDpyq7Nvbp\n0o7MnPw61ze/qJRWEcbKXcf464YsFn2cTUmZo02rCIakxHPrmB507xhD36Q4+neLI7Fd61pbhA7n\nFTLqUV/imTl/akAmEOqZ2LbK8boHJhMdFUlMVGTQYu/3Xx0Z1CRw2ewJPLFkBz+fPoT3dh5h9e7j\n9EyM5d5XNwO+WVd737+Qv9w+hpE96991sOJXE6yZUPcdO02vzudvQXPOkZmTzxsfZbP7SD7vbj9C\n/gWW5pjUrwtzpvbj6p+f/T8TzCSwwpfHpAX9NURERMJVKPrFTQb2OOeOAJjZX4GxgH8imAWk+B0n\ne2U1OOeeAZ4BGD58eItYcjoywqrMjpk25x
/86euja7TAVKgtCWxKswcGWkpC7HkXkj+f5I6xbHpo\nCvuOneazP32XGxd8wEPXDeC1D7NYtOVQ5XUF1bpWrtx1jFK/8XJRkUZiuzZknzw7+cr1Cz6goKiU\nfccLaozVrJCaEEtmTj5je3fi1rFpXDOg9taOit/9wIfeqizrEBPFdYO70/+SOL40uke9xsJ19VtE\nPlCzyPp3q102ewKJ7YK/VuX4SzsH9f49E9vyixlDAZjYtwsT+3YB4KYRqeQVljDoYV8r8PSnV9Kp\nbWvWzp1crwXNK7o7FpYEZw3GY6eL6eX9iHLyClm95zgHTpxhy8GTbDmYR0lZOTl5RRSXVY3PxHZt\n+OLwZNK7tCO/qJTrBl+icbciIiItXCgSwf3AaDOLxdc1dBKwrto1fwO+ZWYv4Zsk5mRLGB9YXzeP\nSuWPq30zNs78zSo2PzylxqQLJbV8oGvIuLlw0aNTW753TV9++tZ2bv/DetpHtyI1IZZZ43uR3DGG\nsb0T65Ro+U8uUlRSxomCEiLNuDI9kYyBSXRpH82QlHg6t7/4JOnOib0ZmtKRKy9NbHBrWExUcFrT\nqrcOtkRx0VHs/dG1vLfjCLf+do0v6bp/IV3j2vDWd+s2YVHFrK0/e3tHUNbFO5ZfxOOLd/Dq+gNk\n5Z6pcb5fUnvGDUukuLScKQO6MqZXIh1i6z+Ry03DUy58kYiIiDRpoRgjuNrMXgE+BEqBDcAzZvYN\n7/yvgYX4lo7IxLd8xG2NXc+mYP6Nl1cmggCFJeVUn7Mjfe7ZyWF2PDK1QTMmhps7J/YhrVNbsk+e\n4eZRPeo1xq+60b0SeGnWmADW7qzvXROYSX8W3z3+oj70S1WfvbQz236YwS3PrmbdvhMczitiyLzF\nANyb0Y9Z43uds2tsXddFrIvycsctz61mxa6z6yJ+40XfKjtDU+P5ytg0hqbGc1m3ONq2Ccy/+gl9\nO/Pu9iPcP61h43NFREQk9EIyZaJz7iHgoWrFv/Y774A7G7VSzcCI+UuqjE3aml11QgclgfV37aDA\nLCofpCFfdAvgzJLpXes2Q6dcWHRUJK/cMZbi0nL6fv/Nyt//jxdt48eLtp2zK3eghk4eOFHAdU99\nwPFalm/43W0jKru0BtovZw5l26FT+kJBRESkBQj93PlyXnsem0bP+85OHDP96ZVsnZdBTOtIpv7i\n/cryljwmsDmID9IH47G9E4NyXwmM1q0i2POY729v+c6j3PLcasDXlTvQcguKuWHBB+w9VlClfMP3\nr2boDxdXHk8I4jjK9tFRWl9PRESkhVAi2MSZWY1k8LIHF1W5ZvJlXRu7WuLp3bktu46c5taxaQG9\n7+r7JzHrD+uZf+PAgN430JbfO7FRlnL4y+1jyDlVeOELQ2hceiJ7f3QtOXmFfOHXK9l/vODCTzqP\nhZuzuffVj87ZnfTx6YP592HJNcoDuY6giIiItFxKBJsBM+Py7h3YnHWy1vPP3jq8kWskFR6+bgC3\n/2E9l3fvEND7do2L5vU7rwjoPYMhuWNso7zOxSzVECpd4qL55z0Ta5SXlJVXGdNbm63ZeTz/wV42\nfprL9sOnapyPiYrk7bvHk5LQOD93ERERabmUCDYTb3x7HBs/zeWGBR9UKdcMoaF1ZXpnPpmXEepq\nSDMQFRnBL2cO5dt/2gDAoZOFJHljQE8VljD8kSUUecuOdIyNYlTPBC6Jj+HhfxugMXkiIiIScEoE\nm5EhKfGV+6/eMYZhqR3VDUykGYn1m5l29GNL+Y9RqZSXOxZtOVSZBL7xrXFcnnxxLczLZk8IRDVF\nREQkDCgRbGY0KYxI81V9WYn/85aH6dW5LXMy+jFjZOpF3XfN3El8evxMWKznKCIiIoGhRFBEpJH0\n7tyucv/9eybygzc+4b5p/aqUX4wu7aPpUn2RUREREZHzUCIoItJI/Cd5SUmI1URPIiIiEjJKBEVE\nGtH04c
mUlLlQV0NERETCnBJBEZFG9JMvDA51FURERESICHUFREREREREpHEpERQREREREQkzSgRF\nRERERETCjBJBERERERGRMKNEUEREREREJMwoERQREREREQkzSgRFRERERETCjBJBERERERGRMKNE\nUEREREREJMwoERQREREREQkzSgRFRERERETCjBJBERERERGRMKNEUEREREREJMwoERQREREREQkz\nSgRFRERERETCjBJBERERERGRMKNEUEREREREJMwoERQREREREQkz5pwLdR0CxsyOAPtCXY9aJAJH\nQ10JabEUXxJMii8JJsWXBJPiS4KtqcZYD+dc5wtd1KISwabKzNY554aHuh7SMim+JJgUXxJMii8J\nJsWXBFtzjzF1DRUREREREQkzSgRFRERERETCjBLBxvFMqCsgLZriS4JJ8SXBpPiSYFJ8SbA16xjT\nGEEREREREZEwoxZBERERERGRMKNEMIjMLMPMtptZppnNCXV9pOkys9+aWY6ZfexXlmBmi81sp/fY\n0e/cfV5cbTeza/zKP2Nmm71zT5qZeeVtzOzPXvlqM0trzPcnoWVmKWa2zMw+MbMtZnaXV64YkwYz\ns2gzW2Nmm7z4+oFXrviSgDGzSDPbYGZ/944VXxIQZrbXi4uNZrbOKwuL+FIiGCRmFgksAKYC/YGZ\nZtY/tLWSJux5IKNa2RxgqXMuHVjqHePF0QxggPec//XiDeBXwNeBdG+ruOfXgBPOuT7Az4EfB+2d\nSFNUCvy3c64/MBq404sjxZgEQhFwlXNuMDAEyDCz0Si+JLDuArb6HSu+JJAmOueG+C0FERbxpUQw\neEYCmc653c65YuAl4PoQ10maKOfcP4Hj1YqvB17w9l8AbvArf8k5V+Sc2wNkAiPNrBsQ55xb5XyD\nf39f7TkV93oFmFTxTZW0fM65bOfch97+KXwfprqjGJMAcD753mGUtzkUXxIgZpYMXAs861es+JJg\nCov4UiIYPN2BT/2OD3hlInXV1TmX7e0fArp6++eKre7efvXyKs9xzpUCJ4FOwam2NGVel5ShwGoU\nYxIgXre9jUAOsNg5p/iSQHoCuAco9ytTfEmgOGCJma03s1leWVjEV6tQV0BELsw558xMU/xKg5hZ\nO+BV4LvOuTz/LyQVY9IQzrkyYIiZxQOvmdnAaucVX3JRzOxzQI5zbr2ZTajtGsWXNNA451yWmXUB\nFpvZNv+TLTm+1CIYPFlAit9xslcmUleHva4GeI85Xvm5YivL269eXuU5ZtYK6AAcC1rNpckxsyh8\nSeAfnXN/9YoVYxJQzrlcYBm+sTGKLwmEK4DrzGwvvmE2V5nZiyi+JECcc1neYw7wGr7hXWERX0oE\ng2ctkG5mPc2sNb6BpX8LcZ2kefkbcKu3fyvwul/5DG8Wqp74BiSv8bow5JnZaK/v+ZerPafiXl8A\n3nFaRDRsePHwHLDVOfe43ynFmDSYmXX2WgIxsxjgamAbii8JAOfcfc65ZOdcGr7PUu84525B8SUB\nYGZtzax9xT4wBfiYcIkv55y2IG3ANGAHsAuYG+r6aGu6G/AnIBsowdev/Gv4+o8vBXYCS4AEv+vn\nenG1HZjqVz4c3z+wXcBTgHnl0cDL+AY1rwF6hfo9a2vU+BqHbwzER8BGb5umGNMWoPgaBGzw4utj\n4EGvXPGlLdCxNgH4u7ev+NIWiJjqBWzyti0Vn9fDJb4qKigiIiIiIiJhQl1DRUREREREwowSQRER\nERERkTCjRFBERERERCTMKBEUEREREREJM0oERUREREREwowSQRERkWrMrMzMNvptcwJ47zQz+zhQ\n9xMREbkYrUJdARERkSbojHNuSKgrISIiEixqERQREakjM9trZj8xs81mtsbM+njlaWb2jpl9ZGZL\nzSzVK+9qZq+Z2SZvG+vdKtLMfmNmW8zsbTOLCdmbEhGRsKREUEREpKaY
al1Db/I7d9I5dznwFPCE\nV/ZL4AXn3CDgj8CTXvmTwHvOucHAMGCLV54OLHDODQBygc8H+f2IiIhUYc65UNdBRESkSTGzfOdc\nu1rK9wJXOed2m1kUcMg518nMjgLdnHMlXnm2cy7RzI4Ayc65Ir97pAGLnXPp3vG9QJRz7pHgvzMR\nEREftQiKiIjUjzvHfn0U+e2XoTH7IiLSyJQIioiI1M9Nfo8rvf0VwAxv/2bgfW9/KXAHgJlFmlmH\nxqqkiIjI+egbSBERkZpizGyj3/Ei51zFEhIdzewjfK16M72ybwO/M7PvAUeA27zyu4BnzOxr+Fr+\n7gCyg157ERGRC9AYQRERkTryxggOd84dDXVdREREGkJdQ0VERERERMKMWgRFRERERETCjFoERURE\nREREwowSQRERERERkTCjRFBERERERCTMKBEUEREREREJM0oERUREREREwowSQRERERERkTDzLzlz\nMtiriVQiAAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<matplotlib.figure.Figure at 0x186db82de80>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "ax = plt.figure(figsize=(15,8))\n",
    "plt.plot(range(len(lossup)),lossup)\n",
    "plt.title('Training loss ')\n",
    "plt.xlabel('Epoch')\n",
    "plt.ylabel('Loss')\n",
    "plt.show(ax)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 71,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAA3gAAAHwCAYAAAD0Es3SAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3XecXHW9//H358zMtvSQDSUBEopIkWZEFAuCBUVRrl4F\nr16veq/XWyw/uXpBxV6vDbsiKCiIIkWRJoQiHZIQUiAhjUB6Nr1sdnfK9/fHnJmd3ewmuzPnnO/s\n7uv5eOSxM2dn5nxm9uzmvM+3mXNOAAAAAIChL/BdAAAAAAAgGgQ8AAAAABgmCHgAAAAAMEwQ8AAA\nAABgmCDgAQAAAMAwQcADAAAAgGGCgAcAGHbMLGVmu8zssCgfW0UdXzOzq6J+XQAA+pP2XQAAAGa2\nq+Jui6ROSfnw/r87564dzOs55/KSRkf9WAAA6h0BDwDgnXOuHLDMbKWkf3XOzezv8WaWds7lkqgN\nAIChhC6aAIC6F3Z1/KOZXWdmOyW9z8xeYWaPmdk2M1tnZj8ys0z4+LSZOTObFt6/Jvz+HWa208we\nNbPpg31s+P03m9kSM9tuZj82s4fN7F8G+D7ON7Onw5rvNbNjKr73WTNba2Y7zGyxmZ0Zbj/dzJ4M\nt28ws+9E8JECAIYpAh4AYKg4X9LvJY2T9EdJOUmfkDRJ0hmSzpH07/t4/nslXSppoqQXJH11sI81\ns8mSrpf06XC/z0k6bSDFm9mxkn4n6WOSWiXNlHSLmWXM7Piw9lOdc2MlvTncryT9WNJ3wu1HSbph\nIPsDAIxMBDwAwFDxkHPur865gnNuj3NulnPucedczjm3QtLlkl67j+ff4Jyb7ZzLSrpW0slVPPat\nkp5yzv0l/N4PJG0aYP0XSLrFOXdv+NxvqRhWX65iWG2SdHzY/fS58D1JUlbS0WZ2gHNup3Pu8QHu\nDwAwAhHwAABDxarKO2b2YjO7zczWm9kOSV9RsVWtP+srbrdr3xOr9PfYQyrrcM45SasHUHvpuc9X\nPLcQPneKc+5ZSRep+B42hl1RDwof+kFJx0l61syeMLO3DHB/AIARiIAHABgqXK/7v5S0UNJRYffF\nL0iymGtYJ2lq6Y6ZmaQpA3zuWkmHVzw3CF9rjSQ5565xzp0habqklKRvhtufdc5dIGmypO9JutHM\nmmp/KwCA4YiABwAYqsZI2i5pdzi+bV/j76Jyq6RTzextZpZWcQxg6wCfe72k88zszHAymE9L2inp\ncTM71sxeZ2aNkvaE/wqSZGbvN7NJYYvfdhWDbiHatwUAGC4IeACAoeoiSR9QMST9UsWJV2LlnNsg\n6T2Svi9ps6QjJc1Vcd2+/T33aRXr/bmkNhUnhTkvHI/XKOn/VBzPt17SBEmfC5/6FkmLwtlDvyvp\nPc65rgjfFgBgGLHi8AEAADBYZpZSsevlu5xzD/quBwAAWvAAABgEMzvHzMaH3SkvVXGWyyc8lwUA\ngCQCHgAAg/UqSStU7Gb5JknnO+f220UTAIAk0EUTAAAAAIYJWvAAAAAAYJgg4AEAAADAMJH2XcBA\nTJo0yU2bNs13GQAAAADgxZw5czY55/a79mpsAc/Mfi3prZI2OudOCLd9R9LbJHVJWi7pg865bft7\nrWnTpmn27NlxlQoAAAAAdc3Mnh/I4+LsonmViou4Vrpb0gnOuRMlLZF0SYz7BwAAAIARJbaA55x7\nQNKWXtvucs7lwruPSZoa1/4BAAAAYKTxOcnKhyTd0d83zewjZjbbzGa3tbUlWBYAAAAADE1eAp6Z\nfU5STtK1/T3GOXe5c26Gc25Ga+t+xxICAAAAwIiX+CyaZvYvKk6+crZjlXUAAAAAiEyiAc/MzpH0\nGUmvdc61J7lvAAAAABjuYuuiaWbXSXpU0jFmttrMPizpJ5LGSLrbzJ4ys1/EtX8AAAAAGGlia8Fz\nzl3Yx+Yr49ofAAAAAIx0PmfRBAAAAA
BEiIAHAAAAAMMEAQ8AAAAAhgkCHgAAAAAMEwQ8AAAAABgm\nCHgAAAAAMEwQ8AAAAABgmCDgAQAAAMAwQcCr0p6uvHZ0ZH2XAQAAAABlBLwqvf/Kx/XR383xXQYA\nAAAAlBHwqtTckNKebN53GQAAAABQRsCrUnMmpT1dBDwAAAAA9YOAVyVa8AAAAADUGwJelWjBAwAA\nAFBvCHhVam4g4AEAAACoLwS8KjVn6KIJAAAAoL4Q8KrU0pBSruCUzRd8lwIAAAAAkgh4VWvKpCRJ\n7XTTBAAAAFAnCHhVamlIS5I66KYJAAAAoE4Q8KrU3FD86JhoBQAAAEC9IOBVqZkumgAAAADqDAGv\nSmOaMpKkXZ05z5UAAAAAQBEBr0rjmosBb1t7l+dKAAAAAKCIgFelcsDbk/VcCQAAAAAUEfCqNL6l\nGPC2txPwAAAAANQHAl6VRjemlQpM2/bQRRMAAABAfSDgVcnMNL45o2204AEAAACoEwS8Goxrzmg7\nY/AAAAAA1AkCXg3GtRDwAAAAANQPAl4NxjdntJVlEgAAAADUCQJeDca3NNCCBwAAAKBuEPBqMI5J\nVgAAAADUEQJeDca3ZLSzI6dcvuC7FAAAAAAg4NVidGNakrSjI+e5EgAAAAAg4NVk1ZZ2SdLSDTs9\nVwIAAAAABLyanHfyIZKk9mzecyUAAAAAQMCryZTxLZKk1Vv3eK4EAAAAAAh4NZk8plEN6aDcVRMA\nAAAAfCLg1SAITIdOaCbgAQAAAKgLBLwaHTK+WWu30UUTAAAAgH8EvBpNHtOkTbu6fJcBAAAAAAS8\nWh0wukGbd3fKOee7FAAAAAAjHAGvRgeMalBHtqD2LpZKAAAAAOAXAa9GE0Y1SJK27KabJgAAAAC/\nCHg1OoCABwAAAKBOEPBqRAseAAAAgHpBwKvR2KaMJGlHR9ZzJQAAAABGOgJejcY0pSVJuzpznisB\nAAAAMNIR8Go0qrEY8HYT8AAAAAB4RsCrUUsmJUna1ckyCQAAAAD8IuDVKAhMoxpS2tVBCx4AAAAA\nvwh4ERjVmKaLJgAAAADvCHgRaG5IqSNHF00AAAAAfhHwIpAOTLmC810GAAAAgBGOgBeBdBAonyfg\nAQAAAPCLgBeBVGDKFQq+ywAAAAAwwhHwIpBJ0UUTAAAAgH8EvAikAlOOLpoAAAAAPCPgRSCdCuii\nCQAAAMA7Al4E0oEpTxdNAAAAAJ7FFvDM7NdmttHMFlZsm2hmd5vZ0vDrhLj2n6R0KlCWLpoAAAAA\nPIuzBe8qSef02naxpHucc0dLuie8P+TRggcAAACgHsQW8JxzD0ja0mvz2yVdHd6+WtI74tp/kljo\nHAAAAEA9SHoM3oHOuXXh7fWSDuzvgWb2ETObbWaz29rakqmuSumUKZdnkhUAAAAAfnmbZMU55yT1\n2+zlnLvcOTfDOTejtbU1wcoGLx0EdNEEAAAA4F3SAW+DmR0sSeHXjQnvPxbpwJRlmQQAAAAAniUd\n8G6R9IHw9gck/SXh/cfCzORowAMAAADgWZzLJFwn6VFJx5jZajP7sKRvSXqDmS2V9Prw/pAXmAh4\nAAAAALxLx/XCzrkL+/nW2XHt05fAWCYBAAAAgH/eJlkZToLAVKAJDwAAAIBnBLwIBCbRgAcAAADA\nNwJeBAIzOVrwAAAAAHhGwItAsQWPgAcAAADALwJeBIxJVgAAAADUAQJeBALWwQMAAABQBwh4EUgF\ndNEEAAAA4B8BLwKBGbNoAgAAAPCOgBcBM9bBAwAAAOAfAS8CzKIJAAAAoB4Q8CJAF00AAAAA9YCA\nF4EgoIsmAAAAAP8IeBEITHJOcoQ8AAAAAB4R8CIQmEkSa+EBAAAA8IqAF4GgmO+UJ+EBAAAA8IiA\nFwELW/AYhwcAAADAJwJeBOiiCQAAAKAeEPAikAo/RVrwAAAAAPhEwItAUO6i6bkQAAAAACMaAS8C\npT
F4eRIeAAAAAI8IeBEozaLJOngAAAAAfCLgRYAumgAAAADqAQEvAkHAMgkAAAAA/CPgRaDURZOA\nBwAAAMAnAl4Eyl00C54LAQAAADCiEfAiQAseAAAAgHpAwIuAGWPwAAAAAPhHwItAqYsm+Q4AAACA\nTwS8CKTCT5EWPAAAAAA+EfAiUGrBy7MQHgAAAACPCHgRMBY6BwAAAFAHCHgRKM2i6eiiCQAAAMAj\nAl4EAlrwAAAAANQBAl4EApZJAAAAAFAHCHgRKHXRZJIVAAAAAD4R8CLAOngAAAAA6gEBLwIB6+AB\nAAAAqAMEvAgYY/AAAAAA1AECXgSYRRMAAABAPSDgRSBFCx4AAACAOkDAi0BpFs0CTXgAAAAAPCLg\nRcDoogkAAACgDhDwIlBqwXN00QQAAADgEQEvAkFACx4AAAAA/wh4ESjNopmnBQ8AAACARwS8CJQn\nWSHgAQAAAPCIgBeBUgseY/AAAAAA+ETAi0B5ofOC50IAAAAAjGgEvAgYXTQBAAAA1AECXgTKLXgE\nPAAAAAAeEfAikGKZBAAAAAB1gIAXgdIsmnkSHgAAAACPCHgR6F7onIAHAAAAwB8CXgQYgwcAAACg\nHhDwIpBimQQAAAAAdYCAF4HSMgl5WvAAAAAAeETAi0BpFk1HwAMAAADgEQEvAqUxeHm6aAIAAADw\niIAXgSD8FOmiCQAAAMAnAl4ESpOs0EUTAAAAgE9eAp6Z/T8ze9rMFprZdWbW5KOOqHR30STgAQAA\nAPAn8YBnZlMkfVzSDOfcCZJSki5Iuo4olRY6J+ABAAAA8MlXF820pGYzS0tqkbTWUx2R6J5F03Mh\nAAAAAEa0xAOec26NpO9KekHSOknbnXN39X6cmX3EzGab2ey2trakyxyUgHXwAAAAANQBH100J0h6\nu6Tpkg6RNMrM3tf7cc65y51zM5xzM1pbW5Muc1AYgwcAAACgHvjoovl6Sc8559qcc1lJN0l6pYc6\nIlPqolkg4AEAAADwyEfAe0HS6WbWYmYm6WxJizzUEZlSCx75DgAAAIBPPsbgPS7pBklPSloQ1nB5\n0nVEiTF4AAAAAOpB2sdOnXNflPRFH/uOg5kpMLpoAgAAAPDL1zIJw04qMBVowQMAAADgEQEvImZG\nF00AAAAAXhHwIpIyo4smAAAAAK8IeBEpdtH0XQUAAACAkYyAFxEzFjoHAAAA4BcBLyJMsgIAAADA\nNwJeRFJGwAMAAADgFwEvImamfMF3FQAAAABGMgJeRFIBC50DAAAA8IuAF5EU6+ABAAAA8IyAF5GA\nSVYAAAAAeEbAi0jAQucAAAAAPCPgRSQVmPLkOwAAAAAeEfAiEpjoogkAAADAKwJeROiiCQAAAMA3\nAl5EUoEpT8ADAAAA4BEBLyKBmch3AAAAAHwi4EUkCBiDBwAAAMAvAl5EUsY6eAAAAAD8IuBFxIwx\neAAAAAD8IuBFJBWYaMADAAAA4BMBLyKsgwcAAADANwJeROiiCQAAAMA3Al5EUkYXTQAAAAB+EfAi\nEgRSnoQHAAAAwCMCXkQClkkAAAAA4BkBLyLFgOe7CgAAAAAjGQEvIoFJBRIeAAAAAI8IeBFJBXTR\nBAAAAOAXAS8iLJMAAAAAwDcCXkRYJgEAAACAbwS8iASB6KIJAAAAwCsCXkTMjHXwAAAAAHhFwIsI\nXTQBAAAA+EbAi0hgdNEEAAAA4BcBLyIBs2gCAAAA8IyAF5EgoIsmAAAAAL8IeBEJTLTgAQAAAPCK\ngBeRVGCMwQMAAADgFQEvImYmGvAAAAAA+ETAiwizaAIAAADwjYAXkZTRRRMAAACAXwS8iBjLJAAA\nAADwjIAXkVRgKhDwAAAAAHhEwItIOjDlCHgAAAAAPCLgRSSdIuABAAAA8IuAF5F0EChfcHJMtAIA\nAADAEwJeRDIpkyRl8wQ8AAAAAH4Q8CKSThU/ylyh4LkSAAAAACMV
AS8i6YAWPAAAAAB+EfAikim1\n4OVpwQMAAADgBwEvIulwDB4zaQIAAADwhYAXkUxQ/CiztOABAAAA8ISAF5FUOAYvTwseAAAAAE8I\neBFJs0wCAAAAAM8IeBHJsEwCAAAAAM8IeBEpLZOQowUPAAAAgCcEvIiUWvCYZAUAAACALwS8iLBM\nAgAAAADfCHgRSbNMAgAAAADPvAQ8MxtvZjeY2WIzW2Rmr/BRR5QyKcbgAQAAAPBrQAHPzI40s8bw\n9plm9nEzG1/Dfn8o6U7n3IslnSRpUQ2vVRfSzKIJAAAAwLOBtuDdKClvZkdJulzSoZJ+X80OzWyc\npNdIulKSnHNdzrlt1bxWPSnNosk6eAAAAAB8GWjAKzjncpLOl/Rj59ynJR1c5T6nS2qT9Bszm2tm\nV5jZqN4PMrOPmNlsM5vd1tZW5a6SU5pkJc8kKwAAAAA8GWjAy5rZhZI+IOnWcFumyn2mJZ0q6efO\nuVMk7ZZ0ce8HOecud87NcM7NaG1trXJXyWGSFQAAAAC+DTTgfVDSKyR93Tn3nJlNl/S7Kve5WtJq\n59zj4f0bVAx8QxqTrAAAAADwLT2QBznnnpH0cUkyswmSxjjnvl3NDp1z681slZkd45x7VtLZkp6p\n5rXqCZOsAAAAAPBtQAHPzO6XdF74+DmSNprZw865T1W5349JutbMGiStULGFcEjLMMkKAAAAAM8G\nFPAkjXPO7TCzf5X0W+fcF81sfrU7dc49JWlGtc+vR+UWPMbgAQAAAPBkoGPw0mZ2sKR3q3uSFVQo\nzaKZYxZNAAAAAJ4MNOB9RdLfJC13zs0ysyMkLY2vrKEnU55Fk4AHAAAAwI+BTrLyJ0l/qri/QtI7\n4ypqKCq34NFFEwAAAIAnA2rBM7OpZnazmW0M/91oZlPjLm4oSZcmWaGLJgAAAABPBtpF8zeSbpF0\nSPjvr+E2hMxMqcCUZ5kEAAAAAJ4MNOC1Oud+45zLhf+uktQaY11DUiZljMEDAAAA4M1AA95mM3uf\nmaXCf++TtDnOwoaipkxKHdm87zIAAAAAjFADDXgfUnGJhPWS1kl6l6R/iammIaspTcADAAAA4M+A\nAp5z7nnn3HnOuVbn3GTn3DvELJp7acoE6sgyBg8AAACAHwNtwevLpyKrYpigiyYAAAAAn2oJeBZZ\nFcNEYyaljhwteAAAAAD8qCXgMV1kL03pQB1dtOABAAAA8CO9r2+a2U71HeRMUnMsFQ1hTZmUtrZ3\n+S4DAAAAwAi1z4DnnBuTVCHDQVMmUCeTrAAAAADwpJYumuilKZNSR44umgAAAAD8IOBFqDFNCx4A\nAAAAfwh4EaIFDwAAAIBPBLwI0YIHAAAAwCcCXoRKLXjOsYIEAAAAgOQR8CLUmA7knJTNE/AAAAAA\nJI+AF6GmTEqS1Mk4PAAAAAAeEPAi1JgufpwdjMMDAAAA4AEBL0KNtOABAAAA8IiAFyFa8AAAAAD4\nRMCLEGPwAAAAAPhEwIsQLXgAAAAAfCLgRagxTQseAAAAAH8IeBFqyhQ/zk5a8AAAAAB4QMCLUCZV\n/Di78gQ8AAAAAMkj4EWoIRyDlyXgAQAAAPCAgBehhhQBDwAAAIA/BLwIZcIWvK4cAQ8AAABA8gh4\nEcqkTJLUlXeeKwEAAAAwEhHwItSYKi6TkKUFDwAAAIAHBLwIZdLFFjzG4AEAAADwgYAXofIyCbTg\nAQAAAPCAgBehdEALHgAAAAB/CHgRMjM1pAMmWQEAAADgBQEvYg2pgC6aAAAAALwg4EUskzK6aAIA\nAADwgoAXsYZ0QMADAAAA4AUBL2IZumgCAAAA8ISAF7GGVKAuWvAAAAAAeEDAixhdNAEAAAD4QsCL\nWCYVKMsyCQAAAAA8IOBFLJMyxuABAAAA8IKAF7HGdEqdubzvMgAAAACMQAS8iLU0pNTeRcADAAAA\nkDwCXsSaGlLakyXgAQAAAEge
AS9iLZmU9tCCBwAAAMADAl7EmmnBAwAAAOAJAS9izYzBAwAAAOAJ\nAS9izZmUunIF5QushQcAAAAgWQS8iLU0pCSJbpoAAAAAEkfAi1hzJgx4dNMEAAAAkDACXsSaG9KS\nCHgAAAAAkkfAi1i5BY8umgAAAAASRsCLWGkMXntXznMlAAAAAEYaAl7EmmjBAwAAAOAJAS9i5Vk0\nGYMHAAAAIGHeAp6Zpcxsrpnd6quGODSXu2gS8AAAAAAky2cL3ickLfK4/1gwyQoAAAAAX7wEPDOb\nKulcSVf42H+cSi14HQQ8AAAAAAnz1YJ3maTPSCr09wAz+4iZzTaz2W1tbclVVqMWumgCAAAA8CTx\ngGdmb5W00Tk3Z1+Pc85d7pyb4Zyb0dramlB1tWtKM8kKAAAAAD98tOCdIek8M1sp6Q+SzjKzazzU\nEYsgMDVlAsbgAQAAAEhc4gHPOXeJc26qc26apAsk3euce1/SdcSpOZOiBQ8AAABA4lgHLwYtDWnt\n7sr5LgMAAADACJP2uXPn3P2S7vdZQxzGNWe0vT3ruwwAAAAAIwwteDEY35LRtj0EPAAAAADJIuDF\nYExTWrs66KIJAAAAIFkEvBiMbsxoVycBDwAAAECyCHgxGNOU1s4OumgCAAAASBYBLwajG9Pa1ZmT\nc853KQAAAABGEAJeDMY0pVVwopsmAAAAgEQR8GIwYVSDJGnrbrppAgAAAEgOAS8GE1rCgNfe5bkS\nAAAAACMJAS8GoxpTkqT2rrznSgAAAACMJAS8GLQ0pCVJ7V2MwQMAAACQHAJeDFoaaMEDAAAAkDwC\nXgyaM8WAt4eABwAAACBBBLwYdLfg0UUTAAAAQHIIeDEoj8HL0oIHAAAAIDkEvBg0ZYofawddNAEA\nAAAkiIAXAzNTKjAVnO9KAAAAAIwkBLyYBCYVHAkPAAAAQHIIeDExM+UJeAAAAAASRMCLScpM5DsA\nAAAASSLgxSQwqcAgPAAAAAAJIuDFJDAmWQEAAACQLAJeTIxJVgAAAAAkjIAXk+IyCQQ8AAAAAMkh\n4MWk2EWTgAcAAAAgOQS8mJiZ8gXfVQAAAAAYSQh4MQlMcrTgAQAAAEgQAS8mjMEDAAAAkDQCXkxY\nJgEAAABA0gh4MTEWOgcAAACQMAJeTOiiCQAAACBpBLyY0EUTAAAAQNIIeDExEy14AAAAABJFwIsJ\nC50DAAAASBoBLyYpMxVY6BwAAABAggh4MaGLJgAAAICkEfBiwiQrAAAAAJJGwItJENCCBwAAACBZ\nBLyYpJhkBQAAAEDCCHgxMbpoAgAAAEgYAS8mgUkFEh4AAACABBHwYsI6eAAAAACSRsCLSRAQ8AAA\nAAAki4AXk8DEGDwAAAAAiSLgxSQwYwweAAAAgEQR8GKSoosmAAAAgIQR8GLCMgkAAAAAkkbAi0lg\nkqMFDwAAAECCCHgxCcyUJ+ABAAAASBABLybFSVZ8VwEAAABgJCHgxaS4TAIteAAAAACSQ8CLSWAm\n8h0AAACAJBHwYhIEYgweAAAAgEQR8GISGOvgAQAAAEgWAS8mdNEEAAAAkDQCXkwCk/KsdA4AAAAg\nQQS8mNBFEwAAAEDSCHgxCQK6aAIAAABIFgEvJqyDBwAAACBpBLyYBGaMwQMAAACQKAJeTILARL4D\nAAAAkKTEA56ZHWpm95nZM2b2tJl9IukakhCY5OiiCQAAACBBaQ/7zEm6yDn3pJmNkTTHzO52zj3j\noZbYMIsmAAAAgKQl3oLnnFvnnHsyvL1T0iJJU5KuI26MwQMAAACQNK9j8MxsmqRTJD3ex/c+Ymaz\nzWx2W1tb0qXVLDCWSQAAAACQLG8Bz8xGS7pR0iedczt6f985d7lzboZzbkZra2vyBdaIZRIAAAAA\nJM1LwDOzjIrh7lrn3E0+aohbEJjyBDwAAAAACfIxi6ZJulLSIufc95Pef1LMxDIJAAAAABLlow
Xv\nDEnvl3SWmT0V/nuLhzpilTJjmQQAAAAAiUp8mQTn3EOSLOn9Jq24TILvKgAAAACMJF5n0RzOAhPL\nJAAAAABIFAEvJsWhhqKbJgAAAIDEEPBikgqKAY9GPAAAAABJIeDFJMx3rIUHAAAAIDEEvJiUumgy\nDg8AAABAUgh4MSl10aQBDwAAAEBSCHgxoYsmAAAAgKQR8GISWGmSFQIeAAAAgGQQ8GJSGoNXKHgu\nBAAAAMCIQcCLSYoumgAAAAASRsCLSRDQRRMAAABAsgh4MSkvk0DAAwAAAJAQAl5MSrNoku8AAAAA\nJIWAF5MUs2gCAAAASBgBLybpVPGjzeYIeAAAAACSQcCLSUtDSpK0J5v3XAkAAACAkYKAF5PmMOC1\nd+U8VwIAAABgpCDgxaQ5E7bgddGCBwAAACAZBLyYlLpo7ibgAQAAAEgIAS8mY5oykqSdHVnPlQAA\nAAAYKQh4MRnblJYk7epkDB4AAACAZBDwYjI6DHg7Owh4AAAAAJJBwItJYzqlhlRAwAMAAACQGAJe\njEY3pbWrkzF4AAAAAJJBwIvR6Ma0dtGCBwAAACAhBLwYjWlKM8kKAAAAgMQQ8GI0ujHNGDwAAAAA\niSHgxYgWPAAAAABJIuDFiBY8AAAAAEki4MVoNC14AAAAABJEwIvRmKaMdnawTAIAAACAZBDwYjS6\nMa1s3qkjm/ddCgAAAIARgIAXo7FNaUliHB4AAACGjF2dOX3oqllat32P71JQBQJejMY0ZSRJO+im\nCQAAgCHir/PW6t7FG/XDmUt9l4IqEPBiNK65GPA27uj0XAkAAACAkYCAF6NDJzZLki781WOeKwEA\nAAAGxznfFaAaBLwYHTyu2XcJAAAAgDdduYK6cgXfZYwoBLwYjWpM+y4BAAAAI9DVj6zUFQ+u8F2G\nTvryXTr5K3f5LmNEIeABAIDEdGTzumzmEnXmWEIIRXcuXK/HV2z2Xcaw88VbntbXbltU1XMtwjr2\nZPNq7+L3PUkEvIQ4OjEDAKArHlyhy2Yu1W8fed53KRiEH9+zVEd99vZYXvuj18zRey5nvoJ65MT5\n61BEwIvZR197pCRp2cZdnisBAMC/PdnilfyOLFf043bmd+7T1Y+sjOS1vnf3EuUKnOwDQwEBL2bv\neukUSdLs57d6rgQAMJQ9vmKzPnX9U/QIwYCt3NyuL97ydKSvuWkXSz8hGQtWb9cP7l7iu4whiYAX\nsyNbR2tsU1qX3LSAq5UAgKq994rHddOTa5SnFQUerd66x3cJSIBFOQivSm/7yUP64T0stF4NAl7M\nzKzcHeXYCF3QAAAgAElEQVSiP83zXA0AYKAKBVdXU3sH4QnXUM93VsP0DZ/8w1z9352LI6xm6Lhn\n0QZt35P1XQYtyAl4YEmbNtdJSyk/7qGJgJeA+/7nTEnSbfPX+S0EAIaYexdv0LSLb9Pi9TsS3/e7\nf/moXvT5OxLfb3+y+eKZVmGYnHFV8y7+/NRa/ez+5ZHXkoS12/boyoeeq+q5G3Z06MNXz9Z///7J\niKsavOFx9NWvbL6gf/71E/qnKx73Xcqw8sCSNn33b8/6LiMxBLwETJ3QUr5N33UAGLi7nt4gSXps\n+eCnUHfO6bKZS/TC5vY+v9/eldNzm3b3+/zS2OlVW/p+vi/1kO9mrdyijTs6qnpuPXT98uFfr56t\nr976jNZsG3wXx9IQj5Wb+z9eUVxu4Yczh3aXvtIFnOVtTM5XEkWr8T//+gn95L5lEVQzNBDwEnLm\nMa2SpBlfm6nn+QMNAAOyrb3YJe1Lf31m0M9dvXWPLpu5VB++elaf3//Ib+fodd+9f7+vE+eFua27\nuzRnkJNw1UML3j/+4lG9+YcPVvXcUr5L+m10ZPPa1t6V7E4r7OgoHsv5/ODfeC3dWqNWB4dfvz56\nzRz9YOaS8mc9FNXLz7pe6pCkrnz9dJUfKgh4CbnyAy8r33
7td+5nwhUAXqze2q5pF9+mm+euHvRz\nnUt+TNr6KluJpO4g1NlPzQ8t2zSg16n2fLZtZ6fuXbxhn495208e0jt//sigXrfWgFcoON08d7Vy\nNZ40bd5dZVgKm/CSXl/rZV+bqZO/cnei+6xUarkc+uuK1X/9i9Ym36U7KqXjI1vFhYA41EMVc1/Y\n5ruEIYeAl5BUYLr9468u33/xpXdq2sW3lf995oZ5WrZxlwpDffQ8gMgUCk6fu3mBFq0b/MnK3Be2\n6sGlbXttXxquyfnnuWsH/ZrXPbFKL/r8HYPuYnbdEy/o5d+YOej9SdJTq6r/jz2qloZclSdaL/v6\nTH3oqtnK7iNIVTMjYa1v6zePrNT/++M8/ebhlTW+Um2Sbgna2Zmr6fm/e3SlHh7gRYG+1FOLSC3q\nuQXPp2y+oGkX36Yb5gz+4lmlevl882EhK/fRjT0pnBsPXtp3ASPJcYeM1XPffIu+/NdndFWvhUev\nn71a18/u/4/CRW94kd520iGaOqFZ6RS5HKg3Nz25WmObMnr9cQdG9prL2nbp2sdf0D2LNuqxz549\nqOee/7Niq9DKb53bY3vpFLOaVqBb5xdD4cpNuzVlfPOAn3fJTQskSZfNXKJPvv5Fg95vrWod81Vz\ni1nEZ2y1vtxXby12d924s/rW0VrUcgz6dOlfiuvJ9f6dGqhyC97Qett7GQrl11LjJTfN13VPrNLc\nS9+gCaMaBvy8Z9fvlCT9z5/m6V0vnVpDBfVhwZrtkupjHWfy3eAR8BJmZvrSecfrS+cdr45sXlc+\n9Jy+M4BZfb539xJ9r4bFHk+YMlYHjmnS+JYGvfTwCXpoWZu+9c4TNbYpU/VrZvMFzVq5RcceNHZA\nfwSXbdylLbu7dNr0ift97J6uvFZu3q1jDx5bdX2ob+1dOb2wpV0vPmh4/Iw/dX1xGZRqT/768sYf\nPBDZa5U8sKT6FohHwolOnnx+q844atKgn3/ZzKWJBryBnhM452RDaeaPiE52Zi7aqM+de1w0LzYI\nG8Jutz++d5kueuMxie/ft2p+fKUwvmqL/zXohnpA3Z/rnlglSXpsxWa9+SUHD/h5u2psIS6pl8+3\nnv4i5uvlQxlCCHgeNWVS+q/XHaX/et1RPbY757SzM6eFq7frf2+aH8kf9IVrdmihit28bnyy2FJ4\n+4L1Nb9uyeKvnqPOXEGBSS0NaZmkh5dv0nObduvdMw5VUyal13//75KKJ8B7uvJqSAdKBaaFa7br\nrT9+SDf95yuVCQI9tWqrHl62WXc+vV4Lv/wmjW7c92G6fU9Wq7a064Qp4yJ7P9V6atU2veOnD+uR\ni8/SIYNo4ahW285OjW/JKDMEW3U/ft1czVy0UYu+co6aG1L7fOz67R2679mNuvC0w/b7ul/56zN6\nZPkm3fnJ1+zzcbNWbtGqLe36h1Pr/0prLePQevv1w8Vp2tfUsFjx00NkfEtp5jXfJypRn5tENYZr\nXzOIxqnqsXvDRDVr2S3bWD8zKg61ltdq+QoV9TJGM6iji1500Rw8Al4dMjONbcrolUdN0oOfOWuf\nj3XOKV9w2tWZ07rtHVretksr2nZr9dZ2rd66R89vbq9qSubBevGld/b7vS+E3VpKPvDrJ/T3JXuP\nDfqHn+090cDNT67W+18xrXw/my9o3bYOHXZAix5bsVkzDp+gf7riMS1cs0M/vvAUPbt+p/7nTf6u\nCF90/VOSpH+64vHy+oe95QtODyxt05kvaq2p1aAjm9fLvj5T754xVf/3rpMG/LxpF98mae+WpmkX\n36bXHdOq33zwtKpr6sv81cXQ++glZ+vAsU3l7TMXbZQkdeUK+w14p3/zHknSq4+e1GPZkb6UAsz+\n/OMvHpWk2ANeoeB0xGdv15TxzXr44r1/n+9cuF4L1mzTp9/04ljr6G1FDSf3dz4d3cWhOJVOCfb3\ne+bcvrtx1nqeF3nAq6
Nznfmrt+nEqeMH9ZxMqn5OHJP0fLhcx/uveFwLvvymQT23mpPtuBYkHykn\n2/lBvs+ogm+9/H7XUb4b9M8CBLwhz8yUTpnGtzRofEtDrF0aS/9ZFFzxl83J6ZjP9x/s+tNXuOvP\npX95Wpf+5Wnd8t9naNLoRn37zsX6y1Nr9Yv3naqPXvOk/uPMI7VwTbE14WPXzZUkrwFv447idOr7\nujJ+xYMr9M07FuuX73+p3nT8QX0+xjmnH8xcqnfPmNpvoCnNxHrr/HU9At6W3V2av3qbzjxm8qDr\nv+/Z/f9sHl2+WeOaMzrukIEda1c9vFIFJz20dJPe2ce4hMH8pzjn+a37DXhRy+YLuvjGBfrYWUdp\n2qRRg37+4nBcRn8XWj56zRxJSjzgjQSlQ6vWE5W/Pb1erzjygKqfH/kYvEhfrTY79gy+W9pQ7HEQ\npVonexmouILC5Q+u0Cur6KKdpCje+2B/b9/7q2gWJq+X3+86ynfKEfAGjYCHAStdBU9ZcVZQad/j\njZxzyuadApPas3m1ZFLa3ZnXzs6snCv2V8+kAm1t79Kujpz2ZPM6snW0XtjSrgktGb0rbGGRpPN+\n8nCP1/7oNU9Kkn5+//K99pvLF3pMRPP85t06/IDBn5jvS1euoPXbiy2Jdy5cr6kTmnXClHFqSAdS\nP0tm7enKq7khVR7HdOfC9T0C3rfvXKzTpk3U6148WUs27NKP7lmqH92ztMdnvKMjq6Ubdumlh08o\nX9Fq7+q55MYHr5qleau2lbs+fuWvz+i3j67Usm+8per3u6Mjq5ZMSulUoAt/9Zikff/s23Z2qnVM\no6Tubi5BP+d0g/mz7aNr0Jznt+rGJ1dr1ZZ2Xf/RV5S3r9rSrinjmxUE+/5vsDPHkii+lC5K7a/1\nY39H1VWPrNSXzju+6joGctz2/rtV6+slpZruhqn9/M7Uu3zBDfn3UIv7B3Ah0Lcoujn6WnqtXlqr\n6mlccq4Q3Q9j1sotetm0/c8FMdQR8BAbM1NDuvgHYmx44jKuJdC4ln1P7HLMQWMkSfO+8EZ98ZaF\n+vNTg5vO/ajP3SFJakwH5fWvPn7WUcqkAhWc9O+vPUJNmZQ6c3k9s3aHHluxRT+6Z6nOPnaybp2/\nTu87/TD9yyunqXV0kx5atkn/9fsnNe8Lb9TcVVv1m4dXamxzRk3pQH+as1oLvvTGcgvMym+dWwx4\nfXh0+WZd+KvH9Pt/fXm5BfPmuWv0g/ecXH7Mz+9frp9ruVZ+69x+10l8/5VPaN6qbVr69Tf3O6vU\n0g3FFqNsoaBmpQbcZXFfTvzSXTr3xIP10/ee2uf3//EXj+ikqeP1+bcepz/PXaNP/vEp3fyfr9Qp\nh03Qum3F8WMPLd2s80/ZuwWvr25E375zsX5+//K9QmRndu8/8lt2d2n7nqymD6B1beWm3Tr8gJYe\n/3H1NcHGI8s26c9PrdH/veuk8sl05UOWbdyp13//Af3vOS/Wf5x55D73WSf/V49Ipc9+f+OXisdg\nfCczAzkGugYR8O5cuF7vO/3wGquKRjU5Jz3Ew5GXgFfF7kb0n54I3nzp/9KkxdW1drDqKN9FGnrn\nrdpGwAN8GteS0WUXnKLLLjhlv4/NF5w+fcM83fTkmvK2ysWNf3TvsvLtH8zsezbSW+evkyRd89gL\nuuaxF3p876Sv3NXnc0rTv0vSfc9u1Lrt3ZNhPLJ8k0ymgnP6z2uLLY7vvaJnF455q7YpMFNXvjvQ\nLVyzvd8FmOeFa4LNWrlFR0wa3edjSi16z29q10umdk8805Ur9AighYLbZ+vTzo6svnH7Yl385mLX\nwdvmr9NP39v3Y2et3KpZK7fq8289Ttc+/rwk6epHVuqUwyboiZVbJBUn9/neu/ceK9jelVfvzm+l\nltne4eu6Wat0Qa+JVk79anHh4r3CYC6vxnT32L6Hl23SP13xuL76jhP0/oqT41zB7TUm
qPRz+vY7\nTyx39amcIW1VOEHJ3c+s32/A29caaIjXQFu6tuzu0uSK8aFRG8gJW1euoJYBzsi+1fMkJZVjsKq5\nyj/U14Pz0oJaxS7rJSj0tnFHh8a3NPR7QTQKs5/fWnM30l8+sEKXvOXYiCoauHppwZs0utF3CWVR\nLvpezdqjQxEBD8NCKjB9/90n6xvnv0S7OnN6YEmb2rvyunPh+n7DUhRKoVCSPvibWT2+N5D++G//\n6cN7bXvrjx/qcf/YS+9U3rkeQaH3a5/61buVzRXUWfGYt/3kIU0a3X3G+KLP36HDJnaPXzvis7fr\n6Mmj1dKQ6hGEvnXHYk0e06jv3fWsdnfldd0T3WH3nkUbyrcfXb5ZhV51LVq3Q7NWFtfM+fNTa/Xv\nr+0ZfpZs2KmObF4d2Z513vGJV8tkMusZiP77urn6/Lnd/8HOW7VNW3Z3qStXUDZf6BHiZ6/coqMm\nd4fepRt2adqkUcoXnAoFpyseXCFJuvTPC3X+KVPKj3thS7sOn9iiwEzZQs/XXLJhV3nGv6fX7pBz\nTrmC0+4w7D35wjZ1ZPPl+33pyg0s4EU5VX8SJ3ZDYWmBgS5Qfs/igc3QWq0tu7s0fj/p7fYF6/Xe\nlw+shl89uEIfO/voKEqrSraiu9SOKrpoRqGW4293Z06j9jM78774CHjVdDmMYtzS9bNW6VVHT4ps\nVuiuXEGnfeMevfPUqX1e8IuqdfT2Bev0cY+/I7Wok3w3oF4xg1Xt7+2C1dsiW1twpFx0JeBhWGnK\npNSUSZVnRqylG1Oh4JR3TgXn1JkraHt7VrmCU3tXTqMa0lqyYaeOnDxaoxrS+vuSjTr6wDFqzqS0\nZXeXduzJasKoBhWcUzooLgdxw5xVOvnQ8Tp0QovWbu/Qjj1ZHX5AiwpOakgH2rK7U/ctbtO5Jx6s\n5kxKDy/bpL88tVZvPfFgpVKmhlSgzbu79PvHX9BrXtSqo1pHl7tfvvmEg9SQDtSQDrRtd1Z/nL1K\np02bqKMOHK2Fa7Zr/urtOm36RB0yrkltOzu1J5vX+adMUUc2r/auvPZUjOP75QPL+x2g/uGrZ5dv\nl8biVXrzDx/c5/2+1nXb1p7VK755b5/7u23+Ot1WEaKl7ha73irHbEp7B+VKJ3zxb+XbZ3/v7/0+\n7k2X9ax3+iW37/WY3jPITrv4NjVlAhUKxfGHlVdjT/ji3+ScU8EVTxIrP+fpl9yucc3F7st9BbST\nvnyXUkGx7aP4/6NJKgbOfD78Gh6zlfucdvFtOmhskwrOaU+vrr+nf+MeBVZsHCjVU3AKa+xZZ++a\npl9yuw6d2FxupXbhY0vPcxVfK73s6zM1qiFVPl0tfbvyBLa8rdfHcNZ379/vmMdKGyuWlzjnsgdU\nCAN6oeB6nPxectMCXfnQc8rmC8rmCurKu71OAj538wIFZnIKP49yfd3vvXJ75fs563t/1wfPmCap\nu/Wq9Hel5LM3Lyh3JS19jvlC+DMouB6hYkdHTpf+eaHMFB4PFt62im3F7aX3mi90v/dKdyxYV3E8\nFZ9X+bPMF0pfw23hrM0ln7lxvsa3ZPb6PHp/Dt3bnB5ZUftFt8Xrdw5qUrHK9z1v9Ta98sjqW3eu\nfewF/dtrjhjw4//3hvl66eETqt6fNPALRVLxM/7Z/cv1huMOrGmfOzqy+syN83XwuCY9esnZNb1W\nSeliWH89Ojbt6uwx23K1olxaJmlRXEC4ee5qvWRK9+y2++uxU+mi6+fpNS+KZhKdlZt291huYldn\nTmOqWH/56kef15fffkLVdVT+P3TLvLX6+vkvqfq1hgqr1yb8SjNmzHCzZ8/e/wMB1CxfcNqxJ6ts\noaBVW9qVDgLt7MhpxaZdOvnQ8QrM9MjyTTr+kHFKB6ZMOlDbzk7Nem6LXnr4BDWkA908d40mjW7U\n6UdMVFMmpZvnrtGJU8frwLGNakoXQ3gqMN06f60O
Gtuk1jGNxTAgp0wQKO+cLrlpgf7t1dM1fVKx\nlfGbdyzSBS87TBNaMmpIp9SQDpRJmdp2duprty3SJ84+WuOaM9q+J6uf3b9Mn3rDMUoFxQk2UkHx\n3xf+8rSOnjxa/zhjqra1Z/W7x57Xh86YrnRgKjgpnTI1pgN1ZPP67l1L9N6XH6YjW0frq7c+I0n6\n+NlHKxOY0qlA375zsQ4Y1aAPv3q6RjWktXLzbv3m4ZX68KumKxVYuF8pZVbuIvyhM6YrMCkIiifi\ngZkeWNKmp9fu0AdecfheVzZ3duTK61a+7/Ri607pRLkkE5hSQaB0qvge0+F7fXjZJs1auVXnvuRg\njWpMKTBTU6b4tXRh4N0zpqrgiuOoTKYgKIaCIKytNDlJUNoWmC5/oNgS+objDtToxrScK544BBXP\nKwWO0v2Cc+Vuz2887sDyshild1t63z3evZW+mG5bsFYd2YLedPyBSvc3W08/bluwrrzfVGAKKj6j\nrlxBt85fp8Ckc044SJlUUP7XkDJ15gr6w6zioscHhBdszHqG7NL77BmuisVXzpw6pind8wdnxQtS\nbTu7Z2Ua05iWk8qfdcos/GyLx9Haii7gEypClXOu79tySoXHfzoVKLDie6+nk9/DJraUW2xKtRdv\n9wyHpa+Vn+kRraN6XlgodF9Y6HmhoXhxo7LnwNGTR5cfkw8DsHOlCyS9Am4YjisD+TEHFseKlwJx\n6WKgC1+vFIbzzmnDjr1n3npxONa8r/fa/RmEW13PZU2OPXhsxazW3YG6EN7YHI5LrpRJmY6ePKbH\nxZjKCzu9Lzw557RpV5e6wgsdxx48VovWda9/eWLY/b/0O1uoeN+VPxNX8bMoOKe2nZ3lz/GUw8aX\n3/NT4fADSTpp6rjuAWDh++zvglDvi0HPVNR41OTR5b9BUvh3LKj4G6Xuv1G5QvGiTjZf0JIN3WN2\nK4N55fly73qknu/h5EPHl39+pWNzr89G6vP4Xbejo/y6R0wa1d1VsuIPZOlm6aJJ8eJe8XZXrlCe\nvbnSy6dPlJl6XHwsHeOl27m863MJndOPmFh+v6VjsufFrsq/O8UbTtL81dv3eq1XHnlArwtJ/f/e\nlmZJLz2v+2dRev99XBgsbyh+6czlNa9XHa86alKfYwwr/w9++fSJe61TXQ/MbI5zbsZ+H+cj4JnZ\nOZJ+KCkl6Qrn3Lf29XgCHgAA0dnVmdPyjbvUkA76PFlKhQHTShdIKk6OU0GxR0FHLq912zvUEE4O\n06MVsVfoLX2/FIofXNKmL/31Gb3j5EOUdz0fUxn8y6db4Y0de7KauWijJo1u1MuPmFisq+LCQvni\nRB8XK1Ztadc9izeqdUyjZhw+oXxhIlUZqMNQXbo4U37/gWl7e7F3xPRJo3TMgWPKJ7iVwaHyYkjp\n9vWzixdpXn/sgZoZdnN/w3EHVlwo6P6sKj/H0vsuXTT467y15dfpfWEhCD+4Ymgx3RI+dnxLRtva\nszr+kLE6eFxzxWfV/bXycyrtq3T/j7NXlettzqTKr/u6Y1p7hJxU0POz7v78ba993jCn+Hm8+uju\nVqKlG3Zp/Y4OTR7TqOMOGauC633c9H9BqPuc3LStvUuzn9+qg8Y26aWHT+jVq2Dv3glS8WsqKF7U\nyaQCdeUKumdxcZ3WM446oMeY0X2FggcqloB6+fSJ4QW1fo7Nip9b78dYxTFz2rSJSgXWZ5CRihmm\ndBGn8mJQqf5MypTNO00Z36wpE5olV5zRuvsiZPeFpNLrlC6KvWfGoeWf/2nTJxZ3VtlDoI/f9dLn\nUXpMacbV0sVXSZpx+ITyhazev6O9P5PF63eWL+rMOHxCj8+/8neke1vPn5PJlCsUNGvlVgXW3f31\n1MPG79XpuXccevXRk3TRG/0tu9Wfug14ZpaStETSGyStljRL0oXOuWf6ew4BDwAAAMBINtCA52O1\n0dMkLXPOrXDO
dUn6g6S3e6gDAAAAAIYVHwFviqRVFfdXh9t6MLOPmNlsM5vd1lb/i2oCAAAAgG8+\nAt6AOOcud87NcM7NaG1t9V0OAAAAANQ9HwFvjaRDK+5PDbcBAAAAAGrgI+DNknS0mU03swZJF0i6\nxUMdAAAAADCsJL7QuXMuZ2b/LelvKi6T8Gvn3NNJ1wEAAAAAw03iAU+SnHO3S7rdx74BAAAAYLiq\n20lWAAAAAACDQ8ADAAAAgGGCgAcAAAAAwwQBDwAAAACGCQIeAAAAAAwTBDwAAAAAGCYIeAAAAAAw\nTBDwAAAAAGCYIOABAAAAwDBBwAMAAACAYYKABwAAAADDhDnnfNewX2bWJul533X0YZKkTb6LwLDF\n8YU4cXwhbhxjiBPHF+JUr8fX4c651v09aEgEvHplZrOdczN814HhieMLceL4Qtw4xhAnji/Eaagf\nX3TRBAAAAIBhgoAHAAAAAMMEAa82l/suAMMaxxfixPGFuHGMIU4cX4jTkD6+GIMHAAAAAMMELXgA\nAAAAMEwQ8KpgZueY2bNmtszMLvZdD+qXmf3azDaa2cKKbRPN7G4zWxp+nVDxvUvC4+pZM3tTxfaX\nmtmC8Hs/MjMLtzea2R/D7Y+b2bQk3x/8MrNDzew+M3vGzJ42s0+E2znGUDMzazKzJ8xsXnh8fTnc\nzvGFyJhZyszmmtmt4X2OL0TGzFaGx8ZTZjY73DbsjzEC3iCZWUrSTyW9WdJxki40s+P8VoU6dpWk\nc3ptu1jSPc65oyXdE95XeBxdIOn48Dk/C483Sfq5pH+TdHT4r/SaH5a01Tl3lKQfSPp2bO8E9Sgn\n6SLn3HGSTpf0X+FxxDGGKHRKOss5d5KkkyWdY2ani+ML0fqEpEUV9zm+ELXXOedOrlj2YNgfYwS8\nwTtN0jLn3ArnXJekP0h6u+eaUKeccw9I2tJr89slXR3evlrSOyq2/8E51+mce07SMkmnmdnBksY6\n5x5zxUGzv+31nNJr3SDp7NJVJQx/zrl1zrknw9s7VTxJmiKOMUTAFe0K72bCf04cX4iImU2VdK6k\nKyo2c3whbsP+GCPgDd4USasq7q8OtwEDdaBzbl14e72kA8Pb/R1bU8Lbvbf3eI5zLidpu6QD4ikb\n9SzsFnKKpMfFMYaIhN3nnpK0UdLdzjmOL0TpMkmfkVSo2MbxhSg5STPNbI6ZfSTcNuyPsbTvAoCR\nzDnnzIypbFETMxst6UZJn3TO7ai8eMgxhlo45/KSTjaz8ZJuNrMTen2f4wtVMbO3StronJtjZmf2\n9RiOL0TgVc65NWY2WdLdZra48pvD9RijBW/w1kg6tOL+1HAbMFAbwuZ+hV83htv7O7bWhLd7b+/x\nHDNLSxonaXNslaPumFlGxXB3rXPupnAzxxgi5ZzbJuk+FcedcHwhCmdIOs/MVqo43OUsM7tGHF+I\nkHNuTfh1o6SbVRxqNeyPMQLe4M2SdLSZTTezBhUHY97iuSYMLbdI+kB4+wOS/lKx/YJwRqbpKg7i\nfSLsRrDDzE4P+3X/c6/nlF7rXZLudSxuOWKEx8OVkhY5575f8S2OMdTMzFrDljuZWbOkN0haLI4v\nRMA5d4lzbqpz7v+3d8cgUl1RHMa/P0ZwMSLRQBBCWEQriQliFSzEQnBbCxNiI1YWwUoUhFRWqYLR\nxhQhRbCwSGMh6i4EIQGbrJvdIhHFbgNaJCCIiJwU7y6MYhLU2ZnkzfeDw9w5Mzzug8MwZ+69zDTd\nd6m5qjqM9aUhSbI+yYaVMbAfWGQSaqyqjJcMYAb4DbgDnB73fIz/bgAXgWXgCd2e7aN0e7NngdvA\ndWDTwPtPt7r6FTgwkN9N96F0BzgHpOXXAZfoDgLfBLaO+56NkdbXHrrzBQvAfIsZa8wYUn3tBH5u\n9bUIfN7y1pcx7FrbC1xuY+vLGFZdbQVutVha+c4+CTW2MjlJkiRJ0v+cWzQlSZ
IkqSds8CRJkiSp\nJ2zwJEmSJKknbPAkSZIkqSds8CRJkiSpJ2zwJEkTI8nTJPMDcWqI155Osjis60mS9CreGPcEJEka\noUdV9eG4JyFJ0mpxBU+SNPGS3EvyRZJfktxMsq3lp5PMJVlIMpvkvZZ/J8n3SW61+Khdak2Sr5Ms\nJbmaZGpsNyVJmkg2eJKkSTL13BbNQwOv/VlV7wPngC9b7ivg26raCXwHnG35s8APVfUBsAtYavnt\nwPmq2gH8ARxc5fuRJOkZqapxz0GSpJFI8rCq3nxB/h6wr6ruJlkL/F5Vm5M8ALZU1ZOWX66qt5Pc\nB96tqscD15gGrlXV9vb8JLC2qs6s/p1JktRxBU+SpE79zfhlPB4YP8Wz7pKkEbPBkySpc2jg8ac2\n/hH4uI0/BW608SxwDCDJmiQbRzVJSZL+ib8sSpImyVSS+YHnV6pq5a8S3kqyQLcK90nLfQZ8k+QE\ncB840vLHgQtJjtKt1B0Dlld99pIk/QvP4EmSJl47g7e7qh6Mey6SJL0Ot2hKkiRJUk+4gidJkiRJ\nPVLBcDsAAAA2SURBVOEKniRJkiT1hA2eJEmSJPWEDZ4kSZIk9YQNniRJkiT1hA2eJEmSJPWEDZ4k\nSZIk9cRflWT5dWIKU4kAAAAASUVORK5CYII=\n",
      "text/plain": [
       "<matplotlib.figure.Figure at 0x186dba62d68>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "# Plot the per-epoch training-loss curve recorded in `lossdown`.\n",
    "fig = plt.figure(figsize=(15, 8))  # plt.figure returns a Figure, not an Axes\n",
    "plt.plot(range(len(lossdown)), lossdown)\n",
    "plt.title('Training loss ')\n",
    "plt.xlabel('Epoch')\n",
    "plt.ylabel('Loss')\n",
    "plt.show()  # show() takes no figure argument; passing one is an error in modern matplotlib"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "count    1000.000000\n",
       "mean       11.061524\n",
       "std         0.575088\n",
       "min        10.000377\n",
       "25%        10.549865\n",
       "50%        11.090504\n",
       "75%        11.562022\n",
       "max        11.995127\n",
       "Name: distance, dtype: float64"
      ]
     },
     "execution_count": 16,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Summary statistics of the `distance` column in the test set.\n",
    "testSet[\"distance\"].describe()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "count    7000.000000\n",
       "mean       14.306769\n",
       "std         7.759768\n",
       "min         0.687198\n",
       "25%         8.357792\n",
       "50%        11.068380\n",
       "75%        21.150343\n",
       "max        36.505566\n",
       "Name: distance, dtype: float64"
      ]
     },
     "execution_count": 17,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Summary statistics of the `distance` column in the training set.\n",
    "trainSet[\"distance\"].describe()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>distance</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>count</th>\n",
       "      <td>3476.000000</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>mean</th>\n",
       "      <td>7.987758</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>std</th>\n",
       "      <td>2.177995</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>min</th>\n",
       "      <td>0.687198</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>25%</th>\n",
       "      <td>6.593829</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>50%</th>\n",
       "      <td>8.343216</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>75%</th>\n",
       "      <td>9.513436</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>max</th>\n",
       "      <td>11.995372</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "          distance\n",
       "count  3476.000000\n",
       "mean      7.987758\n",
       "std       2.177995\n",
       "min       0.687198\n",
       "25%       6.593829\n",
       "50%       8.343216\n",
       "75%       9.513436\n",
       "max      11.995372"
      ]
     },
     "execution_count": 18,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Distance statistics for class t == 0; single .loc call avoids chained indexing.\n",
    "trainSet.loc[trainSet[\"t\"] == 0, [\"distance\"]].describe()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>distance</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>count</th>\n",
       "      <td>3524.000000</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>mean</th>\n",
       "      <td>20.539710</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>std</th>\n",
       "      <td>6.057052</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>min</th>\n",
       "      <td>10.007508</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>25%</th>\n",
       "      <td>15.884379</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>50%</th>\n",
       "      <td>21.112254</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>75%</th>\n",
       "      <td>25.108676</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>max</th>\n",
       "      <td>36.505566</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "          distance\n",
       "count  3524.000000\n",
       "mean     20.539710\n",
       "std       6.057052\n",
       "min      10.007508\n",
       "25%      15.884379\n",
       "50%      21.112254\n",
       "75%      25.108676\n",
       "max      36.505566"
      ]
     },
     "execution_count": 19,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Distance statistics for class t == 1; single .loc call avoids chained indexing.\n",
    "trainSet.loc[trainSet[\"t\"] == 1, [\"distance\"]].describe()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 55,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "from sklearn import svm"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 107,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "6766 0.966571428571\n"
     ]
    }
   ],
   "source": [
    "# Fit an RBF-kernel SVM on features a, b, c and report training accuracy.\n",
    "svmC = svm.SVC(C=1.0, kernel='rbf')\n",
    "svmC.fit(trainSet[[\"a\", \"b\", \"c\"]], trainSet[\"t\"])\n",
    "svmTrainPredict = svmC.predict(trainSet[[\"a\", \"b\", \"c\"]])\n",
    "nCorrect = (svmTrainPredict == trainSet[\"t\"]).sum()  # count of correct train predictions\n",
    "print(nCorrect, nCorrect / len(trainSet))  # len(trainSet) instead of hard-coded 7000.0"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 108,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "650"
      ]
     },
     "execution_count": 108,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Count correct predictions of the fitted SVM on the held-out test set.\n",
    "svmPredict = svmC.predict(testSet[[\"a\", \"b\", \"c\"]])\n",
    "(svmPredict == testSet[\"t\"]).sum()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 105,
   "metadata": {
    "scrolled": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "100 6995 0.999285714286 765\n",
      "120 6996 0.999428571429 764\n",
      "140 6997 0.999571428571 767\n",
      "160 6998 0.999714285714 769\n",
      "180 6998 0.999714285714 769\n",
      "200 6998 0.999714285714 769\n",
      "220 6998 0.999714285714 769\n",
      "240 6998 0.999714285714 769\n",
      "260 6998 0.999714285714 770\n",
      "280 6998 0.999714285714 770\n",
      "300 6998 0.999714285714 770\n",
      "320 6998 0.999714285714 770\n",
      "340 6998 0.999714285714 770\n",
      "360 6998 0.999714285714 771\n",
      "380 6998 0.999714285714 771\n",
      "400 6998 0.999714285714 772\n",
      "420 6998 0.999714285714 772\n",
      "440 6998 0.999714285714 772\n",
      "460 6999 0.999857142857 772\n",
      "480 6999 0.999857142857 772\n",
      "500 6999 0.999857142857 772\n"
     ]
    }
   ],
   "source": [
    "# Sweep the SVM penalty C over 100..500; print C, train hits, train accuracy, test hits.\n",
    "nTrain = len(trainSet)  # avoid the hard-coded 7000.0 divisor\n",
    "for number in range(100, 501, 20):\n",
    "    svmC = svm.SVC(C=number, kernel='rbf', tol=0.0001)\n",
    "    svmC.fit(trainSet[[\"a\", \"b\", \"c\"]], trainSet[\"t\"])\n",
    "    svmTrainPredict = svmC.predict(trainSet[[\"a\", \"b\", \"c\"]])\n",
    "    svmPredict = svmC.predict(testSet[[\"a\", \"b\", \"c\"]])\n",
    "    nTrainCorrect = (svmTrainPredict == trainSet[\"t\"]).sum()\n",
    "    print(number, nTrainCorrect, nTrainCorrect / nTrain, (svmPredict == testSet[\"t\"]).sum())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 106,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "5 6884 0.983428571429 721\n",
      "10 6930 0.99 736\n",
      "15 6952 0.993142857143 748\n",
      "20 6963 0.994714285714 751\n",
      "25 6975 0.996428571429 762\n",
      "30 6975 0.996428571429 760\n",
      "35 6979 0.997 762\n",
      "40 6982 0.997428571429 763\n",
      "45 6985 0.997857142857 767\n",
      "50 6988 0.998285714286 768\n",
      "55 6988 0.998285714286 769\n",
      "60 6989 0.998428571429 767\n",
      "65 6991 0.998714285714 766\n",
      "70 6993 0.999 767\n",
      "75 6994 0.999142857143 767\n",
      "80 6994 0.999142857143 768\n",
      "85 6995 0.999285714286 768\n",
      "90 6995 0.999285714286 768\n",
      "95 6995 0.999285714286 766\n",
      "100 6995 0.999285714286 765\n"
     ]
    }
   ],
   "source": [
    "for number in range(5, 101, 5):\n",
    "    svmC = svm.SVC(C=number, kernel='rbf', tol=0.0001)\n",
    "    svmC.fit(trainSet[[\"a\", \"b\", \"c\"]], trainSet[\"t\"])\n",
    "    svmTrainPredict = svmC.predict(trainSet[[\"a\", \"b\", \"c\"]])\n",
    "    svmPredict = svmC.predict(testSet[[\"a\", \"b\", \"c\"]])\n",
    "    print(number, sum(svmTrainPredict==trainSet[\"t\"]), sum(svmTrainPredict==trainSet[\"t\"])/7000.0, sum(svmPredict==testSet[\"t\"]))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Abandoned experiment: 1-D Gaussian mixture on the \"distance\" feature.\n",
    "# NOTE(review): sklearn.mixture.GMM is deprecated (removed in sklearn 0.20);\n",
    "# use sklearn.mixture.GaussianMixture if this experiment is revived.\n",
    "# from sklearn.mixture import GMM\n",
    "# gmm = GMM(n_components=4).fit(trainSet[[\"distance\"]])\n",
    "# labels = gmm.predict(testSet[[\"distance\"]])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 60,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "6502"
      ]
     },
     "execution_count": 60,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "svmCC = svm.SVC(kernel='linear')\n",
    "svmCC.fit(trainSet[[\"distance\"]], trainSet[\"t\"])\n",
    "svmCCTrainPredict = svmCC.predict(trainSet[[\"distance\"]])\n",
    "sum(svmCCTrainPredict==trainSet[\"t\"])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 61,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "505"
      ]
     },
     "execution_count": 61,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "svmCCTrainPredict = svmCC.predict(testSet[[\"distance\"]])\n",
    "sum(svmCCTrainPredict==testSet[\"t\"])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 73,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "6524\n",
      "525\n",
      "************\n",
      "6530\n",
      "527\n",
      "************\n",
      "6542\n",
      "529\n",
      "************\n",
      "6535\n",
      "524\n",
      "************\n",
      "6531\n",
      "516\n",
      "************\n",
      "6518\n",
      "503\n",
      "************\n",
      "6503\n",
      "501\n",
      "************\n",
      "6503\n",
      "500\n",
      "************\n",
      "6503\n",
      "502\n",
      "************\n",
      "6502\n",
      "507\n",
      "************\n",
      "6506\n",
      "503\n",
      "************\n",
      "6509\n",
      "497\n",
      "************\n",
      "6512\n",
      "503\n",
      "************\n",
      "6496\n",
      "496\n",
      "************\n",
      "6502\n",
      "496\n",
      "************\n",
      "6500\n",
      "497\n",
      "************\n",
      "6484\n",
      "488\n",
      "************\n",
      "6477\n",
      "484\n",
      "************\n",
      "6479\n",
      "484\n",
      "************\n",
      "6480\n",
      "483\n",
      "************\n",
      "6476\n",
      "475\n",
      "************\n"
     ]
    }
   ],
   "source": [
    "List = [10, 10.1, 10.2, 10.3, 10.4, 10.5, 10.6, 10.7, 10.8, 10.9, 11, 11.1, 11.2, 11.3, 11.4, 11.5, 11.6, 11.7, 11.8, 11.9, 12]\n",
    "for number in List:\n",
    "    print(sum((trainSet[\"distance\"]>number) == (trainSet[\"t\"]==1)))\n",
    "    print(sum((testSet[\"distance\"]>number) == (testSet[\"t\"]==1)))\n",
    "    print('************')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.2"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
