{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "%matplotlib inline"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import pandas as pd\n",
    "import matplotlib.pyplot as plt\n",
    "\n",
    "from sklearn import datasets\n",
    "from sklearn.model_selection import train_test_split\n",
    "from sklearn import metrics\n",
    "from sklearn.neighbors import DistanceMetric\n",
    "from sklearn import preprocessing"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "class KNNClassifier(object):\n",
    "    def __init__(self):\n",
    "        self.X_train = None\n",
    "        self.y_train = None\n",
    "        self.k = 1\n",
    "        self.distance = 'euclidean'\n",
    "\n",
    "    def any_distance(self, a, b):\n",
    "        dist = DistanceMetric.get_metric(self.distance)\n",
    "        matDist = dist.pairwise([a,b])\n",
    "        return matDist[0,-1]\n",
    "\n",
    "    def closest(self, row):\n",
    "        dists = [self.any_distance(row, item) for _,item in self.X_train.iterrows()]\n",
    "        neighbors = sorted(dists)[:self.k]\n",
    "        # nei = dists.index(min(dists))\n",
    "        # print(neighbors)\n",
    "        \n",
    "        nei = [dists.index(x) for x in neighbors]\n",
    "#         print(nei)\n",
    "        votes = self.y_train.iloc[nei]\n",
    "        votes = np.array(votes)\n",
    "#         print(votes)\n",
    "        label = np.argmax(np.bincount(votes))\n",
    "        return label\n",
    "\n",
    "    def fit(self, training_data, training_labels, k=1, distance='euclidean'):\n",
    "        self.X_train = training_data\n",
    "        self.y_train = training_labels\n",
    "        self.k = k\n",
    "        self.distance = distance\n",
    "\n",
    "    def predict(self, to_classify):\n",
    "        print('Predicting...')\n",
    "        predictions = []\n",
    "        for _,row in to_classify.iterrows():\n",
    "            label = self.closest(row)\n",
    "            #print('Predicted:',label)\n",
    "            predictions.append(label)\n",
    "        return predictions"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "ename": "FileNotFoundError",
     "evalue": "File b'train.csv' does not exist",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mFileNotFoundError\u001b[0m                         Traceback (most recent call last)",
      "\u001b[0;32m<ipython-input-4-2903870308e1>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mdataset\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mpd\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mread_csv\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'train.csv'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m      2\u001b[0m \u001b[0;31m#test.csv file does not have Survived column. Thus, I've prefered to split train.csv.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      3\u001b[0m \u001b[0;31m#test_data = pd.read_csv('test.csv')\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      4\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      5\u001b[0m \u001b[0;31m#Removing Non-relevant features\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m~/miniconda3/envs/DataScience/lib/python3.6/site-packages/pandas/io/parsers.py\u001b[0m in \u001b[0;36mparser_f\u001b[0;34m(filepath_or_buffer, sep, delimiter, header, names, index_col, usecols, squeeze, prefix, mangle_dupe_cols, dtype, engine, converters, true_values, false_values, skipinitialspace, skiprows, nrows, na_values, keep_default_na, na_filter, verbose, skip_blank_lines, parse_dates, infer_datetime_format, keep_date_col, date_parser, dayfirst, iterator, chunksize, compression, thousands, decimal, lineterminator, quotechar, quoting, escapechar, comment, encoding, dialect, tupleize_cols, error_bad_lines, warn_bad_lines, skipfooter, skip_footer, doublequote, delim_whitespace, as_recarray, compact_ints, use_unsigned, low_memory, buffer_lines, memory_map, float_precision)\u001b[0m\n\u001b[1;32m    653\u001b[0m                     skip_blank_lines=skip_blank_lines)\n\u001b[1;32m    654\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 655\u001b[0;31m         \u001b[0;32mreturn\u001b[0m \u001b[0m_read\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfilepath_or_buffer\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mkwds\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    656\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    657\u001b[0m     \u001b[0mparser_f\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__name__\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mname\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m~/miniconda3/envs/DataScience/lib/python3.6/site-packages/pandas/io/parsers.py\u001b[0m in \u001b[0;36m_read\u001b[0;34m(filepath_or_buffer, kwds)\u001b[0m\n\u001b[1;32m    403\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    404\u001b[0m     \u001b[0;31m# Create the parser.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 405\u001b[0;31m     \u001b[0mparser\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mTextFileReader\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfilepath_or_buffer\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwds\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    406\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    407\u001b[0m     \u001b[0;32mif\u001b[0m \u001b[0mchunksize\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0miterator\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m~/miniconda3/envs/DataScience/lib/python3.6/site-packages/pandas/io/parsers.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, f, engine, **kwds)\u001b[0m\n\u001b[1;32m    762\u001b[0m             \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0moptions\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'has_index_names'\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mkwds\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'has_index_names'\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    763\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 764\u001b[0;31m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_make_engine\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mengine\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    765\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    766\u001b[0m     \u001b[0;32mdef\u001b[0m \u001b[0mclose\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m~/miniconda3/envs/DataScience/lib/python3.6/site-packages/pandas/io/parsers.py\u001b[0m in \u001b[0;36m_make_engine\u001b[0;34m(self, engine)\u001b[0m\n\u001b[1;32m    983\u001b[0m     \u001b[0;32mdef\u001b[0m \u001b[0m_make_engine\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mengine\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m'c'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    984\u001b[0m         \u001b[0;32mif\u001b[0m \u001b[0mengine\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;34m'c'\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 985\u001b[0;31m             \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_engine\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mCParserWrapper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mf\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0moptions\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    986\u001b[0m         \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    987\u001b[0m             \u001b[0;32mif\u001b[0m \u001b[0mengine\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;34m'python'\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m~/miniconda3/envs/DataScience/lib/python3.6/site-packages/pandas/io/parsers.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, src, **kwds)\u001b[0m\n\u001b[1;32m   1603\u001b[0m         \u001b[0mkwds\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'allow_leading_cols'\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mindex_col\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mFalse\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1604\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1605\u001b[0;31m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_reader\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mparsers\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mTextReader\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msrc\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwds\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1606\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1607\u001b[0m         \u001b[0;31m# XXX\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32mpandas/_libs/parsers.pyx\u001b[0m in \u001b[0;36mpandas._libs.parsers.TextReader.__cinit__ (pandas/_libs/parsers.c:4209)\u001b[0;34m()\u001b[0m\n",
      "\u001b[0;32mpandas/_libs/parsers.pyx\u001b[0m in \u001b[0;36mpandas._libs.parsers.TextReader._setup_parser_source (pandas/_libs/parsers.c:8873)\u001b[0;34m()\u001b[0m\n",
      "\u001b[0;31mFileNotFoundError\u001b[0m: File b'train.csv' does not exist"
     ]
    }
   ],
   "source": [
    "dataset = pd.read_csv('train.csv')\n",
    "#test.csv file does not have Survived column. Thus, I've prefered to split train.csv.\n",
    "#test_data = pd.read_csv('test.csv') \n",
    "\n",
    "#Removing Non-relevant features\n",
    "del dataset['Cabin']\n",
    "del dataset['Ticket']\n",
    "del dataset['PassengerId']\n",
    "del dataset['Name']\n",
    "\n",
    "#Mapping numerical or NaN features values\n",
    "dataset['Age'] = dataset.Age.fillna(dataset.Age.mean())\n",
    "dataset = dataset.where((pd.notnull(dataset)), 0)\n",
    "for row in [\"Sex\", \"Embarked\"]:\n",
    "    dataset[row] = dataset[row].astype('category')\n",
    "    dataset[row] = dataset[row].cat.codes\n",
    "datasetCopy = dataset.copy()\n",
    "\n",
    "#Spliting dataset\n",
    "list(dataset)\n",
    "Y = dataset['Survived'].copy()\n",
    "del dataset['Survived']\n",
    "X = dataset\n",
    "\n",
    "X_train, X_test, Y_train, Y_test = train_test_split(X,Y,test_size = 0.3)\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "# Baseline run: raw (un-normalized) features, k=10, Minkowski distance.\n",
     "knn = KNNClassifier()\n",
     "knn.fit(X_train, Y_train, k=10, distance='minkowski')\n",
     "result = knn.predict(X_test)\n",
     "score = metrics.accuracy_score(y_pred = result, y_true = Y_test)\n",
     "print(score)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>Survived</th>\n",
       "      <th>Pclass</th>\n",
       "      <th>Sex</th>\n",
       "      <th>Age</th>\n",
       "      <th>SibSp</th>\n",
       "      <th>Parch</th>\n",
       "      <th>Fare</th>\n",
       "      <th>Embarked</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>Survived</th>\n",
       "      <td>1.000000</td>\n",
       "      <td>-0.338481</td>\n",
       "      <td>-0.543351</td>\n",
       "      <td>-0.069809</td>\n",
       "      <td>-0.035322</td>\n",
       "      <td>0.081629</td>\n",
       "      <td>0.257307</td>\n",
       "      <td>-0.176509</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>Pclass</th>\n",
       "      <td>-0.338481</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.131900</td>\n",
       "      <td>-0.331339</td>\n",
       "      <td>0.083081</td>\n",
       "      <td>0.018443</td>\n",
       "      <td>-0.549500</td>\n",
       "      <td>0.173511</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>Sex</th>\n",
       "      <td>-0.543351</td>\n",
       "      <td>0.131900</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.084153</td>\n",
       "      <td>-0.114631</td>\n",
       "      <td>-0.245489</td>\n",
       "      <td>-0.182333</td>\n",
       "      <td>0.118492</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>Age</th>\n",
       "      <td>-0.069809</td>\n",
       "      <td>-0.331339</td>\n",
       "      <td>0.084153</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>-0.232625</td>\n",
       "      <td>-0.179191</td>\n",
       "      <td>0.091566</td>\n",
       "      <td>-0.039610</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>SibSp</th>\n",
       "      <td>-0.035322</td>\n",
       "      <td>0.083081</td>\n",
       "      <td>-0.114631</td>\n",
       "      <td>-0.232625</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.414838</td>\n",
       "      <td>0.159651</td>\n",
       "      <td>0.071480</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>Parch</th>\n",
       "      <td>0.081629</td>\n",
       "      <td>0.018443</td>\n",
       "      <td>-0.245489</td>\n",
       "      <td>-0.179191</td>\n",
       "      <td>0.414838</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.216225</td>\n",
       "      <td>0.043351</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>Fare</th>\n",
       "      <td>0.257307</td>\n",
       "      <td>-0.549500</td>\n",
       "      <td>-0.182333</td>\n",
       "      <td>0.091566</td>\n",
       "      <td>0.159651</td>\n",
       "      <td>0.216225</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>-0.230365</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>Embarked</th>\n",
       "      <td>-0.176509</td>\n",
       "      <td>0.173511</td>\n",
       "      <td>0.118492</td>\n",
       "      <td>-0.039610</td>\n",
       "      <td>0.071480</td>\n",
       "      <td>0.043351</td>\n",
       "      <td>-0.230365</td>\n",
       "      <td>1.000000</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "          Survived    Pclass       Sex       Age     SibSp     Parch  \\\n",
       "Survived  1.000000 -0.338481 -0.543351 -0.069809 -0.035322  0.081629   \n",
       "Pclass   -0.338481  1.000000  0.131900 -0.331339  0.083081  0.018443   \n",
       "Sex      -0.543351  0.131900  1.000000  0.084153 -0.114631 -0.245489   \n",
       "Age      -0.069809 -0.331339  0.084153  1.000000 -0.232625 -0.179191   \n",
       "SibSp    -0.035322  0.083081 -0.114631 -0.232625  1.000000  0.414838   \n",
       "Parch     0.081629  0.018443 -0.245489 -0.179191  0.414838  1.000000   \n",
       "Fare      0.257307 -0.549500 -0.182333  0.091566  0.159651  0.216225   \n",
       "Embarked -0.176509  0.173511  0.118492 -0.039610  0.071480  0.043351   \n",
       "\n",
       "              Fare  Embarked  \n",
       "Survived  0.257307 -0.176509  \n",
       "Pclass   -0.549500  0.173511  \n",
       "Sex      -0.182333  0.118492  \n",
       "Age       0.091566 -0.039610  \n",
       "SibSp     0.159651  0.071480  \n",
       "Parch     0.216225  0.043351  \n",
       "Fare      1.000000 -0.230365  \n",
       "Embarked -0.230365  1.000000  "
      ]
     },
     "execution_count": 16,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "#Correlation analysis\n",
     "# Correlations on the copy that still contains Survived; the Survived\n",
     "# row/column shows how informative each feature is about the target.\n",
     "corr = datasetCopy.corr()\n",
     "corr"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "#Dataset Normalization\n",
     "# Standardize each feature so no single feature dominates the distance\n",
     "# computation. Note the resulting columns are renamed to integers 0..6.\n",
     "datasetNorm = pd.DataFrame(preprocessing.scale(dataset))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>0</th>\n",
       "      <th>1</th>\n",
       "      <th>2</th>\n",
       "      <th>3</th>\n",
       "      <th>4</th>\n",
       "      <th>5</th>\n",
       "      <th>6</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>0.827377</td>\n",
       "      <td>0.737695</td>\n",
       "      <td>-0.592481</td>\n",
       "      <td>0.432793</td>\n",
       "      <td>-0.473674</td>\n",
       "      <td>-0.502445</td>\n",
       "      <td>0.587966</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>-1.566107</td>\n",
       "      <td>-1.355574</td>\n",
       "      <td>0.638789</td>\n",
       "      <td>0.432793</td>\n",
       "      <td>-0.473674</td>\n",
       "      <td>0.786845</td>\n",
       "      <td>-1.912644</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>0.827377</td>\n",
       "      <td>-1.355574</td>\n",
       "      <td>-0.284663</td>\n",
       "      <td>-0.474545</td>\n",
       "      <td>-0.473674</td>\n",
       "      <td>-0.488854</td>\n",
       "      <td>0.587966</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>-1.566107</td>\n",
       "      <td>-1.355574</td>\n",
       "      <td>0.407926</td>\n",
       "      <td>0.432793</td>\n",
       "      <td>-0.473674</td>\n",
       "      <td>0.420730</td>\n",
       "      <td>0.587966</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>0.827377</td>\n",
       "      <td>0.737695</td>\n",
       "      <td>0.407926</td>\n",
       "      <td>-0.474545</td>\n",
       "      <td>-0.473674</td>\n",
       "      <td>-0.486337</td>\n",
       "      <td>0.587966</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "          0         1         2         3         4         5         6\n",
       "0  0.827377  0.737695 -0.592481  0.432793 -0.473674 -0.502445  0.587966\n",
       "1 -1.566107 -1.355574  0.638789  0.432793 -0.473674  0.786845 -1.912644\n",
       "2  0.827377 -1.355574 -0.284663 -0.474545 -0.473674 -0.488854  0.587966\n",
       "3 -1.566107 -1.355574  0.407926  0.432793 -0.473674  0.420730  0.587966\n",
       "4  0.827377  0.737695  0.407926 -0.474545 -0.473674 -0.486337  0.587966"
      ]
     },
     "execution_count": 18,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# Sanity check: peek at the standardized features.\n",
     "datasetNorm.head()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "#Features Scaling such that the more strongly correlated a feature is with Y, \n",
    "#then the more the feature will influence in the distance\n",
    "datasetNorm[0] *= np.absolute(corr['Survived']['Pclass'])\n",
    "datasetNorm[1] *= np.absolute(corr['Survived']['Sex'])\n",
    "datasetNorm[2] *= np.absolute(corr['Survived']['Age'])\n",
    "datasetNorm[3] *= np.absolute(corr['Survived']['SibSp'])\n",
    "datasetNorm[4] *= np.absolute(corr['Survived']['Parch'])\n",
    "datasetNorm[5] *= np.absolute(corr['Survived']['Fare'])\n",
    "datasetNorm[6] *= np.absolute(corr['Survived']['Embarked'])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>0</th>\n",
       "      <th>1</th>\n",
       "      <th>2</th>\n",
       "      <th>3</th>\n",
       "      <th>4</th>\n",
       "      <th>5</th>\n",
       "      <th>6</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>0.280052</td>\n",
       "      <td>0.400828</td>\n",
       "      <td>-0.041360</td>\n",
       "      <td>0.015287</td>\n",
       "      <td>-0.038666</td>\n",
       "      <td>-0.129282</td>\n",
       "      <td>0.103781</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>-0.530097</td>\n",
       "      <td>-0.736553</td>\n",
       "      <td>0.044593</td>\n",
       "      <td>0.015287</td>\n",
       "      <td>-0.038666</td>\n",
       "      <td>0.202460</td>\n",
       "      <td>-0.337599</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>0.280052</td>\n",
       "      <td>-0.736553</td>\n",
       "      <td>-0.019872</td>\n",
       "      <td>-0.016762</td>\n",
       "      <td>-0.038666</td>\n",
       "      <td>-0.125785</td>\n",
       "      <td>0.103781</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>-0.530097</td>\n",
       "      <td>-0.736553</td>\n",
       "      <td>0.028477</td>\n",
       "      <td>0.015287</td>\n",
       "      <td>-0.038666</td>\n",
       "      <td>0.108257</td>\n",
       "      <td>0.103781</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>0.280052</td>\n",
       "      <td>0.400828</td>\n",
       "      <td>0.028477</td>\n",
       "      <td>-0.016762</td>\n",
       "      <td>-0.038666</td>\n",
       "      <td>-0.125138</td>\n",
       "      <td>0.103781</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "          0         1         2         3         4         5         6\n",
       "0  0.280052  0.400828 -0.041360  0.015287 -0.038666 -0.129282  0.103781\n",
       "1 -0.530097 -0.736553  0.044593  0.015287 -0.038666  0.202460 -0.337599\n",
       "2  0.280052 -0.736553 -0.019872 -0.016762 -0.038666 -0.125785  0.103781\n",
       "3 -0.530097 -0.736553  0.028477  0.015287 -0.038666  0.108257  0.103781\n",
       "4  0.280052  0.400828  0.028477 -0.016762 -0.038666 -0.125138  0.103781"
      ]
     },
     "execution_count": 20,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# The scaled features after correlation weighting.\n",
     "datasetNorm.head()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "# Re-split using the normalized, correlation-weighted features.\n",
     "X = datasetNorm\n",
     "X_train, X_test, Y_train, Y_test = train_test_split(X,Y,test_size=0.25)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "# Same classifier, now on the normalized + weighted features.\n",
     "knn = KNNClassifier()\n",
     "knn.fit(X_train, Y_train, k=10, distance='euclidean')\n",
     "result = knn.predict(X_test)\n",
     "score = metrics.accuracy_score(y_pred = result, y_true = Y_test)\n",
     "print(score)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Predicting...\n",
      "K= 1  Score= 0.766816143498\n",
      "Predicting...\n",
      "K= 5  Score= 0.820627802691\n",
      "Predicting...\n",
      "K= 9  Score= 0.816143497758\n",
      "Predicting...\n",
      "K= 13  Score= 0.816143497758\n",
      "Predicting...\n",
      "K= 17  Score= 0.820627802691\n",
      "Predicting...\n",
      "K= 21  Score= 0.820627802691\n",
      "Predicting...\n",
      "K= 25  Score= 0.820627802691\n",
      "Predicting...\n",
      "K= 29  Score= 0.816143497758\n",
      "Predicting...\n",
      "K= 33  Score= 0.816143497758\n",
      "Predicting...\n",
      "K= 37  Score= 0.816143497758\n",
      "Predicting...\n",
      "K= 41  Score= 0.816143497758\n",
      "Predicting...\n",
      "K= 45  Score= 0.816143497758\n",
      "Predicting...\n",
      "K= 49  Score= 0.816143497758\n",
      "Predicting...\n",
      "K= 53  Score= 0.816143497758\n",
      "Predicting...\n",
      "K= 57  Score= 0.816143497758\n",
      "Predicting...\n",
      "K= 61  Score= 0.816143497758\n",
      "Predicting...\n",
      "K= 65  Score= 0.811659192825\n",
      "Predicting...\n",
      "K= 69  Score= 0.811659192825\n",
      "Predicting...\n",
      "K= 73  Score= 0.816143497758\n",
      "Predicting...\n",
      "K= 77  Score= 0.816143497758\n",
      "Predicting...\n",
      "K= 81  Score= 0.820627802691\n",
      "Predicting...\n",
      "K= 85  Score= 0.820627802691\n",
      "Predicting...\n",
      "K= 89  Score= 0.820627802691\n",
      "Predicting...\n",
      "K= 93  Score= 0.825112107623\n",
      "Predicting...\n",
      "K= 97  Score= 0.807174887892\n",
      "Predicting...\n",
      "K= 101  Score= 0.798206278027\n",
      "Predicting...\n",
      "K= 105  Score= 0.798206278027\n",
      "Predicting...\n",
      "K= 109  Score= 0.798206278027\n",
      "Predicting...\n",
      "K= 113  Score= 0.798206278027\n",
      "Predicting...\n",
      "K= 117  Score= 0.798206278027\n"
     ]
    }
   ],
   "source": [
    "knn = KNNClassifier()\n",
    "accuracies = []\n",
    "\n",
    "for i in range(0,30):\n",
    "    k = 1 + i*4\n",
    "    kRange.append(k)\n",
    "    knn.fit(X_train, Y_train, k=k, distance='euclidean')\n",
    "    result = knn.predict(X_test)\n",
    "    score = metrics.accuracy_score(y_pred = result, y_true = Y_test)\n",
    "    print('K=', k, ' Score=', score)\n",
    "    accuracies.append(score)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYwAAAEKCAYAAAAB0GKPAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3Xu8VHW9//HXhy33i4rKVhGFlKPiPQhNLUjLME3M6giV\nlplEaYqVJcfspj9NzayTHHmQF6wMj6YpJaWlbPNn5QGV5Ca5BVTwAubxsiGEDZ/zx3eNezHMnlmz\nmbVnZu338/GYx15rzVprPh/Zzmev7/e7vsvcHRERkVK6VTsAERGpDyoYIiKSiAqGiIgkooIhIiKJ\nqGCIiEgiKhgiIpKICoaIiCSigiEiIomoYIiISCI7VDuAStp111196NChifdft24dffv2TS+gKsha\nTlnLB7KXU9bygezlVCyfxx9//FV33y3JeTJVMIYOHcr8+fMT79/U1MTYsWPTC6gKspZT1vKB7OWU\ntXwgezkVy8fMnkt6HjVJiYhIIioYIiKSiAqGiIgkooIhIiKJqGCIiEgiKhgiIpKICoaIiCSigiEi\n9W/mTJg8GTZvrnYkmZapG/dEpAv6y1/g7LNhyxaYOBHGjKl2RJmlKwwRqV8tLXDmmaFYACxaVN14\nMk4FQ0Tq10UXwbPPQrfoq0wFI1UqGCJSn37/e5g+HXr0gKuvDttUMFKlgiEi9eef/wz9FgCXXQaf\n+lRYXrQI3KsXV8apYIhIfXGHL38ZXnoJjj0WvvY12H13GDgQXn8dXnyx2hFmlgqGiNSXWbPgjjug\nXz+49VZoaAAzOOSQ8L6apVKjgiEi9WPVKjj33LB83XXwrne1vXfwweHnwoWdH1cXoYIhIvVhyxY4\n66zQ7HTyyW19GDm5gqErjNSoYIhIffiv/4I//Ql22QV+9rPQDBWngpE6FQwRqX3LlsE3vhGWZ8wI\nndz5Djoo/FyyRFOEpEQFQ0Rq26ZNcMYZ8K9/hbu6Tzut8H477wyDB4f9Vqzo3Bi7CBUMEaltV14J\n8+bBkCHwn/9ZfF+NlEqVCoaI1K758+H73w/LM2fCjjsW318jpVKV6my1ZjYO+AnQANzo7j/Ie39H\n4JfA3lEsP3T3W8xsCPBzoBFwYIa7/yTNWBN5+ml46CH4whfCdASdafNm+PGPS96UtO8LL8Bvf5v8\nvGbwyU/CkUduZ4DSpb39Nlx7bbgDu5LuvTf87k+ZAscdV3p/dXynKrWCYWYNwDTgQ8AqYJ6ZzXb3\nJbHdzgWWuPtHzWw3YJmZ3Qa0Al9z9yfMrD/wuJn9Me/Yzjd1KtxzD6xc2TZ3TWe56iq45JKSuw3p\nyLlnzgx/ke2xR0eOFgn/b1x3XTrnPvBAuOKKZPuqYKQqzSuM0UCzuy8HMLPbgfFA/Evfgf5mZkA/\n4DWg1d1fAl4CcPe3zGwpMDjv2M738svh5w9/GMaBv//9nfO5Tz4J3/lOWL7kktC5147mZ59lv333\nTX7u3/wGHn00XDX97nfbDlUUKWGnJ58MxaKhIfye9ulTuZM3NMDHPga9eyfb/8ADw+/wP/4Rrnp6\n9qxcLJJqwRgMvBBbXwXkt3tcD8wGXgT6A6e7+5b4DmY2FDgCeCytQBN7443w0x0++1l46ino3z/d\nz9ywIYwQaW2F886Dyy8vuvuqpib2Gzs2+fknTAh/lc2ZAzfeCOecs33xStfyxhsccNVVYfnSS8Or\nmvr0gf32g2eeCUUj1wkuFVHtJ+59GFgAHAfsC/zRzB5x9zcBzKwfcBcwJbctn5lNAiYBNDY20tTU\nlPjDW1paytr/vWvX0hPY0NhIr5UreWnCBJZddFHi4zti3xtuYMjixawfMoT5J53ElhLxlpsTwKDz\nzmPE5Zez+fzzmdenDxsGD+54wBXWkXxqXZZyOuAHP2D3V17hzf3358ljjsFrIK+Ddt+d3Z55hiV3\n3MGaDvapZOnfCCqYj7un8gLeC9wfW5
8KTM3b5z7gfbH1h4DR0XJ34H7gq0k/c+TIkV6OuXPnlrW/\n9+vnDu5/+Yt7z55h+d57yztHOZqa3M3cGxrcH3ss0SFl5+TuvmWL+7//e8jnmGPcW1vLP0dKOpRP\njctMTnff7Q7e2qOH+9Kl1Y6mzaWXht/lqVM7fIrM/BtFiuUDzPeE37FpDqudBww3s2Fm1gOYQGh+\ninseOB7AzBqB/YHlUZ/GTcBSd/9RijEmt3lzeBykWRhRdOWVYfs558DatZX/vDffDM1e7qHfYvTo\nyn9GjhnccEPo9H700dBHI1LMK6/ApEkALP/iF+GAA6ocUIw6vlOTWsFw91bgPMJVwlLgDndfbGaT\nzWxytNtlwNFmthB4EPimu78KHAOcARxnZgui10fSijWRN6MWsf79w+MgL7gAxo6FNWvgi1+s/ENb\npkyB556DUaPgW9+q7LkLGTgQbr45LF96Kfz97+l/ptQn9/CH0quvwvHHs/rUU6sd0dZUMFKTah+G\nu88B5uRtmx5bfhE4ocBx/x+oreE6uQ7vAQPCz27dwnDUQw8NI41+/vNwRVAJ994Lt9wCvXqF83bv\nXpnzljJuHHzpS+Fq44wzwt21GmUi+W6+Odzrs+OO4ff02WerHdHWhg8P/8+sWBFaBfr1q3ZEmaE7\nvZPKXWHE7zTdZ5+2qQrOPz9cEWyvNWvaRir94AdhmGBnuuaaMMpk4UL49rc797Ol9q1YEa5+AaZN\nC9N11Jru3dv+v1lS3ZH4WaOCkVTuCiN/aoIzz4RTTw0F5ayzwpz9HZW71F+7Fo4/Hr7ylY6fq6P6\n9oVf/CJcQV1zDTzySOfHILVp8+ZwFd3SAp/4RNtztGuRmqVSoYKRVH6TVI5ZmG550CCYOxd+sh0z\nmMycCbNnt13qd6vSP89RR4U7d3P3m7z1VnXikNryox+FPyB23x2mT6/tmzw1p1QqVDCSKtQklbPb\nbuGBLhC+aDtyGbxyZehIB/jpT6t/qf/tb8MRR4QmiK9+tbqxSPUtXNg2+OKmm8JDjGqZrjBSoYKR\nVHtNUjmnnAKf/3yYjuCMM2DjxuTnzl3qv/UWfPzj8JnPbH+826tHj9A01bNnuAO8nAkNJVviv9OT\nJsFHqjtgMREVjFSoYCRVqmBAmE9n6FB44gm47LLk577uOvjzn6GxsbYu9Q86qG3Sty98IZ37TaT2\nffe7YZj1vvuGGWnrwT77hP64l18Ow3+lIqo9NUj9aK8PI27AALj11nB/xhVXhBvhBg4sft6WlrZZ\naG+6CXbdtSLhVsyUKaFf5eGHw1XQmWd26scPWrKkbdLHjHgnpz337LwJLOPc4cEHk32RvvpqmJm5\nW7fwu10vQ1S7dQtXGY89BosXw5gx1Y4oE1QwkirWhxH3/vfD174W7pY+99zk5z/nHDjppI7Hl5bc\nF8Uhh8Dvfx9enWhEp35a59gqpzvuCM8j6UwzZsDkyaX3i7v4YjjmmHTiSUuuYCxcqIJRISoYSSVp\nksq5/PLwV9yqVcnOPWhQ8vn+q2GffeCuu8LIre0ZNtwBa9asYdCgQZ36mWlbs2YNg7p3hwceCF/c\nxx7bec8iaW5uG8Rw8smh2aaUIUPge99LN640qB+j4lQwkiqnYPTsmb35mD70ofDqZEuamhhUznTt\ndWBJUxODxoyBE0+E+++Hs8+G++5Lv++qtTU0Ka5fH6a1nzUr3c+rNhWMilOnd1K5JqlifRgiSZmF\nKTZ23jk0882Ykf5nXnMN/PWvoe9k2rT0P6/a4gWj0nO9dVEqGEmVc4UhksSee4Z5uyA0EzU3p/dZ\nCxa0PbXxlltKD8bIgsbGcL/IG2/A6tXVjiYTVDCSUsGQNJx+emgeWr8+NBdt3lz5z9iwIdzbs2kT\nfPnLcMI2831mk1nbE/fULFURKhhJJRlWK9IR06aFq42//jUMYa20Sy8NQ0uHD0/n/LVMU4RUlApG\nEu
7Jh9WKlGvgwNBMBKHZaMGCyp374YfDzXYNDeHO/SSjorJEHd8VpYKRxPr1oamgV68wZYZIpZ1w\nQmgu2rQpTMOxYcP2nzP+1Mb/+I/wpMiuRgWjolQwklD/hXSGq68OzUaLFoVmpO2Ve2rju99dmfPV\no4MOCj+XLEmnf6iLUcFIQv0X0hnizyK59trQnNRRuac29uwZztlZT22sNTvtFG483LABli+vdjR1\nTwUjCfVfSGc58sjQfOQOn/tc2+9eOfKf2jgiixOslEHNUhWjgpGEmqSkM33726EZaeVKuPDC8o51\nD1OQr10LH/hAeHRwV6eRUhWjgpGECoZ0pu7d255FcvPNoXkpqZkzw/4DBoTlaj21sZboCqNiNJdU\nEpoWRDrbiBFw5ZXhDvBzzgnPWSk1JPaf/9z6qY177516mHVBBaNiVDCS0BWGVMMFF4QnHc6dC4cf\nnvy4004LQ3MlOPDAcKX1j3+Epwf27FntiOqWCkYSKhhSDd26hWal009P/rTDwYNr66mNtaB3b9hv\nv1Awli2DQw+tdkR1SwUjCQ2rlWrZe+8wZYhsn4MPDgVj0SIVjO2Qao+YmY0zs2Vm1mxmFxd4f0cz\n+62Z/d3MFpvZWbH3bjazNWZW/YZHDasVqW8aKVURqRUMM2sApgEnEp5KOdHM8geEnwsscffDgLHA\ntWaWm3tjJjAurfjKoiYpkfqmju+KSPMKYzTQ7O7L3X0jcDswPm8fB/qbmQH9gNeAVgB3/3O0Xn0q\nGCL1TQWjItLswxgMvBBbXwXkz352PTAbeBHoD5zu7mU9NNrMJgGTABobG2lqakp8bEtLS6L93716\nNQOAJ5qbebPGJx9MmlO9yFo+kL2c6iEfa23lfd27023lSh6ZM4fNffoU3b8ecipHxfJx91RewCeA\nG2PrZwDXF9jnOsCA/YAVwIDY+0OBRUk/c+TIkV6OuXPnJtvx3/7NHdyXLCnr/NWQOKc6kbV83LOX\nU93kc9hh4f/jv/2t5K51k1NCxfIB5nvC79g0m6RWA0Ni63tF2+LOAu6O4m6OCsYBKcbUMWqSEql/\napbabmkWjHnAcDMbFnVkTyA0P8U9DxwPYGaNwP5A7U0pqYIhUv80Umq7pVYw3L0VOA+4H1gK3OHu\ni81ssplNjna7DDjazBYCDwLfdPdXAcxsFvBXYH8zW2VmZ6cVa1EbN4apkRsaoES7p4jUMF1hbLdU\nb9xz9znAnLxt02PLLwIFn0jv7hPTjC2x+DxSuntWpH6pYGw3TWVZipqjRLJh772hXz945ZXkU63I\nVlQwStG0ICLZ0K1b21XG4sXVjaVOqWCUomlBRLJDzVLbRQWjFDVJiWRHrmA89VR146hTKhilqGCI\nZMfo0eHnH/4AW8qaVEJQwShNT9sTyY4jj4QhQ+CFFzRtfAeoYJSiKwyR7OjWDSZMCMuzZlU3ljqk\nglGKCoZItuQKxp13QmtrdWOpMyoYpahgiGTLEUfA8OGwZg1kaEbazqCCUYr6MESyxQwmRhNJqFmq\nLCoYpegKQyR7Tj89/Lz7bnj77erGUkdUMEpRwRDJnhEj4NBD4fXX4YEHqh1N3VDBKEVNUiLZpGap\nsqlglKIrDJFsyjVL3XsvrFtX3VjqhApGKSoYItk0bFi4kW/9erjvvmpHUxdUMIrZsgXeeissq0lK\nJHvULFUWFYxiWlrAHfr2DU/cE5Fs+eQnwzDbOXPaWhOkXSoYxag5SiTb9twTxo4Nj2K+555qR1Pz\nVDCKUcEQyT7NLZWYCkYxKhgi2ffxj8MOO8Cf/qRHt5agglGM7sEQyb5ddoETToDNm+Guu6odTU0r\nWTDM7CtmtnNnBFNzdIUh0jWoWSqRJFcYjcA8M7vDzMaZmaUdVM1QwRDpGsaPh1694JFHYNWqakdT\ns0oWDHf/FjAcuAn4HPCMmV1hZvumHFv1qUlKpGsYMABOOikMo7/z
zmpHU7MS9WG4uwMvR69WYGfg\n12Z2dYqxVZ+uMES6DjVLlZSkD+MCM3scuBp4FDjE3b8EjAQ+XuLYcWa2zMyazeziAu/vaGa/NbO/\nm9liMzsr6bGdQgVDpOs46STo1w/mzaPX6tXVjqYmJbnCGAic5u4fdvc73X0TgLtvAU5u7yAzawCm\nAScCI4CJZjYib7dzgSXufhgwFrjWzHokPDZ9KhgiXUfv3nDqqQAMmju3ysHUpiQF4/fAa7kVMxtg\nZkcCuPvSIseNBprdfbm7bwRuB8bn7eNA/6gjvV/0Oa0Jj02f+jBEupaoWWrQQw9VOZDatEOCfW4A\n3h1bbymwrZDBwAux9VXAkXn7XA/MBl4E+gOnu/sWM0tyLABmNgmYBNDY2EhTGc/obWlpKbr/Yc89\nx87AghUreL1Onv1bKqd6k7V8IHs5ZSkf69GDo/v3p9+KFcy75RbWDRtW7ZAqolL/RkkKhkWd3kBo\nijKzJMcl8WFgAXAcsC/wRzN7pJwTuPsMYAbAqFGjfOzYsYmPbWpqouj+0Qjiw8eMgVGjygmrakrm\nVGeylg9kL6es5cOECfCzn/Ge5cvhrLNK718HKvVvlKRJarmZnW9m3aPXBcDyBMetBobE1veKtsWd\nBdztQTOwAjgg4bHpyzVJqQ9DpOuIj5Zq+1tZSFYwJgNHE76wc01DkxIcNw8YbmbDzKwHMIHQ/BT3\nPHA8gJk1AvsTilGSY9OX6/RWH4ZI1zFmDG8PHAjPPguPP17taGpKkhv31rj7BHcf5O6N7v4pd1+T\n4LhW4DzgfmApcIe7LzazyWY2OdrtMuBoM1sIPAh8091fbe/YjqXYQe4aJSXSFTU0sDbXfHP77VUN\npdaU7Isws17A2cBBQK/cdnf/fKlj3X0OMCdv2/TY8ovACUmP7VQbNsCmTdCjR5gyQES6jDXHHcde\nd98Nv/xl+A6oBxdeCLvtlupHJOm8/gXwNKGD+vvApwl/9WebhtSKdFlvjhgRnvm9YgVceWW1w0nm\nzDNromDs5+6fNLPx7n6rmf0KKGskU11Sc5RI12UWnsB333310/G9666pf0SSgrEp+vm6mR1MmE9q\nUHoh1QgVDJGu7dBDw0vekaRgzIieh/EtwkilfsClqUZVC1QwRES2UrRgmFk34E13/1/gz8C7OiWq\nWqA+DBGRrRQdVhtNMPiNToqltugKQ0RkK0lu3PuTmX3dzIaY2cDcK/XIqk0FQ0RkK0n6ME6Pfp4b\n2+ZkvXlK04KIiGylZMFw92xM11guTQsiIrKVJHd6n1lou7v/vPLh1BA1SYmIbCVJk9R7Ysu9CJMF\nPgGoYIiIdCFJmqS+El83s50IT8DLNg2rFRHZSpJRUvnWAdnv19AVhojIVpL0YfyWMCoKQoEZAdyR\nZlA1QQVDRGQrSfowfhhbbgWec/dVKcVTOzSsVkRkK0kKxvPAS+6+AcDMepvZUHdfmWpk1aZhtSIi\nW0nSh3EnsCW2vjnall2trbBuXZjiuF+/akcjIlITkhSMHdx9Y24lWq6TR1B1UHyEVLeOjAsQEcme\nJN+Ga83slNyKmY0HXk0vpBqg/gsRkW0k6cOYDNxmZtdH66uAgnd/Z4b6L0REtpHkxr1ngaPMrF+0\n3pJ6VNWmIbUiItso2SRlZleY2U7u3uLuLWa2s5ld3hnBVY0KhojINpL0YZzo7q/nVqKn730kvZBq\ngKYFERHZRpKC0WBmPXMrZtYb6Flk//qnKwwRkW0k6fS+DXjQzG4BDPgccGuaQVWdCoaIyDZKXmG4\n+1XA5cCBwP7A/cA+SU5uZuPMbJmZNZvZxQXev8jMFkSvRWa2Off4VzO7INq22MymlJXV9tKwWhGR\nbSS9K+0VwgSEnwSOA5aWOsDMGoBpwImECQsnmtmI+D7ufo27H+7uhwNTgYfd/TUzOxg4BxgNHAac\nbGb7JYx1+2lYrYjINtotGGb2
b2b2HTN7GvgpYU4pc/cPuPv17R0XMxpodvfl0d3htwPji+w/EZgV\nLR8IPObu6929FXgYOC3BZ1aGmqRERLZRrA/jaeAR4GR3bwYwswvLOPdg4IXY+irgyEI7mlkfYBxw\nXrRpEfD/zGwX4F+EUVnz2zl2EjAJoLGxkaampsQBtrS0FNz/kOXL2QVY+Pzz/LOM89WC9nKqV1nL\nB7KXU9bygezlVKl8ihWM04AJwFwz+wPhCsG2+xML+yjwqLu/BuDuS83sKuABwgObFhAmPdyGu88A\nZgCMGjXKx44dm/hDm5qaKLh/9+4AHHLssTBmTPIsakC7OdWprOUD2cspa/lA9nKqVD7tNkm5+z3u\nPgE4AJgLTAEGmdkNZnZCgnOvBobE1veKthUygbbmqNzn3+TuI939/cD/Av9I8JmVoT4MEZFtJBkl\ntc7df+XuHyV86T8JfDPBuecBw81smJn1IBSF2fk7mdmOwBjg3rztg6KfexOudn6V4DMrQ30YIiLb\nSHIfxjuiu7zfaQIqsW+rmZ1HGIbbANzs7ovNbHL0/vRo148BD7j7urxT3BX1YWwCzo3fbZ46DasV\nEdlGWQWjXO4+B5iTt2163vpMYGaBY9+XZmztctfUICIiBejpQPnWrYPNm6F373c6v0VERAVjW+q/\nEBEpSAUjn/ovREQKUsHIpyG1IiIFqWDkU5OUiEhBKhj5VDBERApSwcinIbUiIgWpYOTTFYaISEEq\nGPlUMEREClLByKdhtSIiBalg5NOwWhGRglQw8qlJSkSkIBWMfCoYIiIFqWDkUx+GiEhBKhj51Ich\nIlKQCkY+NUmJiBSkgpFPTVIiIgWpYMRt3AgbNkBDQ3iAkoiIvEMFIy7eHGVW3VhERGqMCkac+i9E\nRNqlghGn/gsRkXapYMRpSK2ISLtUMOLUJCUi0i4VjDgVDBGRdqlgxKkPQ0SkXakWDDMbZ2bLzKzZ\nzC4u8P5FZrYgei0ys81mNjB670IzWxxtn2VmvdKMFVAfhohIEakVDDNrAKYBJwIjgIlmNiK+j7tf\n4+6Hu/vhwFTgYXd/zcwGA+cDo9z9YKABmJBWrO9Qk5SISLvSvMIYDTS7+3J33wjcDowvsv9EYFZs\nfQegt5ntAPQBXkwt0hw1SYmItGuHFM89GHghtr4KOLLQjmbWBxgHnAfg7qvN7IfA88C/gAfc/YF2\njp0ETAJobGykqakpcYAtLS1b7T/imWcYBCxZtYo1ZZynluTnVO+ylg9kL6es5QPZy6li+bh7Ki/g\nE8CNsfUzgOvb2fd04Lex9Z2Bh4DdgO7APcBnSn3myJEjvRxz587desO4ce7g/rvflXWeWrJNTnUu\na/m4Zy+nrOXjnr2ciuUDzPeE3+tpNkmtBobE1veKthUyga2boz4IrHD3te6+CbgbODqVKOPUhyEi\n0q40C8Y8YLiZDTOzHoSiMDt/JzPbERgD3Bvb/DxwlJn1MTMDjgeWphhroD4MEZF2pdaH4e6tZnYe\ncD9hlNPN7r7YzCZH70+Pdv0YoY9iXezYx8zs18ATQCvwJDAjrVjfoWG1IiLtSrPTG3efA8zJ2zY9\nb30mMLPAsd8BvpNieNtSk5SISLt0p3fOli3w1lthuX//6sYiIlKDVDBy4sWioaG6sYiI1CAVjBz1\nX4iIFKWCkaP+CxGRolQwcjSkVkSkKBWMHDVJiYgUpYKRoyYpEZGiVDByVDBERIpSwchRH4aISFEq\nGDnqwxARKUoFI0dNUiIiRalg5KhJSkSkKBWMHF1hiIgUpYKRoz4MEZGiVDBydIUhIlKUCkaO+jBE\nRIpSwchRk5SISFEqGADuapISESlBBQNgwwZobYWePcNLRES2oYIBuroQEUlABQPUfyEikoAKBugK\nQ0QkARUM0JBaEZEEVDBAVxgiIgmkWjDMbJyZLTOzZjO7uMD7F5nZgui1yMw2m9lAM9s/tn2Bmb
1p\nZlNSC1R9GCIiJe2Q1onNrAGYBnwIWAXMM7PZ7r4kt4+7XwNcE+3/UeBCd38NeA04PHae1cBv0opV\nVxgiIqWleYUxGmh29+XuvhG4HRhfZP+JwKwC248HnnX351KIMVAfhohISWkWjMHAC7H1VdG2bZhZ\nH2AccFeBtydQuJBUjpqkRERKSq1JqkwfBR6NmqPeYWY9gFOAqe0daGaTgEkAjY2NNDU1Jf7QlpYW\nmpqa2P/pp9kDWPbyy7xUxvG1KJdTVmQtH8heTlnLB7KXU8XycfdUXsB7gftj61OBqe3s+xvgUwW2\njwceSPqZI0eO9HLMnTs3LHziE+7g/t//XdbxteidnDIia/m4Zy+nrOXjnr2ciuUDzPeE37FpNknN\nA4ab2bDoSmECMDt/JzPbERgD3FvgHO31a1SWOr1FREpKrUnK3VvN7DzgfqABuNndF5vZ5Oj96dGu\nHyNcRayLH29mfQkjrL6YVozvUB+GiEhJqfZhuPscYE7etul56zOBmQWOXQfskmJ4bXSFISJSku70\nBg2rFRFJQAUDdIUhIpKACsamTbB+PXTrBn37VjsaEZGapYLx1lvh54ABYFbdWEREapgKhpqjREQS\nUcHQkFoRkURUMDZtgj32gN13r3YkIiI1rVbmkqqe97wHXnyx2lGIiNQ8XWGIiEgiKhgiIpKICoaI\niCSigiEiIomoYIiISCIqGCIikogKhoiIJKKCISIiiVh4pGs2mNla4LkyDtkVeDWlcKolazllLR/I\nXk5Zyweyl1OxfPZx992SnCRTBaNcZjbf3UdVO45KylpOWcsHspdT1vKB7OVUqXzUJCUiIomoYIiI\nSCJdvWDMqHYAKchaTlnLB7KXU9bygezlVJF8unQfhoiIJNfVrzBERCShLlswzGycmS0zs2Yzu7ja\n8ZTLzIaY2VwzW2Jmi83sgmj7QDP7o5k9E/3cudqxlsPMGszsSTP7XbRe7/nsZGa/NrOnzWypmb23\nnnMyswuj37dFZjbLzHrVWz5mdrOZrTGzRbFt7eZgZlOj74llZvbh6kRdXDs5XRP93j1lZr8xs51i\n73Uopy5ZMMysAZgGnAiMACaa2YjqRlW2VuBr7j4COAo4N8rhYuBBdx8OPBit15MLgKWx9XrP5yfA\nH9z9AOAwQm51mZOZDQbOB0a5+8FAAzCB+stnJjAub1vBHKL/pyYAB0XH/Ff0/VFrZrJtTn8EDnb3\nQ4F/AFNh+3LqkgUDGA00u/tyd98I3A6Mr3JMZXH3l9z9iWj5LcIX0WBCHrdGu90KnFqdCMtnZnsB\nJwE3xjY2GtUNAAAE1klEQVTXcz47Au8HbgJw943u/jp1nBPhKZ29zWwHoA/wInWWj7v/GXgtb3N7\nOYwHbnf3t919BdBM+P6oKYVycvcH3L01Wv0bsFe03OGcumrBGAy8EFtfFW2rS2Y2FDgCeAxodPeX\nordeBhqrFFZH/Bj4BrAltq2e8xkGrAVuiZrZbjSzvtRpTu6+Gvgh8DzwEvCGuz9AneaTp70csvJd\n8Xng99Fyh3PqqgUjM8ysH3AXMMXd34y/52EIXF0MgzOzk4E17v54e/vUUz6RHYB3Aze4+xHAOvKa\na+opp6hdfzyhEO4J9DWzz8T3qad82pOFHOLM7BJCE/Zt23uurlowVgNDYut7Rdvqipl1JxSL29z9\n7mjzK2a2R/T+HsCaasVXpmOAU8xsJaGJ8Dgz+yX1mw+Ev9xWuftj0fqvCQWkXnP6ILDC3de6+ybg\nbuBo6jefuPZyqOvvCjP7HHAy8Glvu4eiwzl11YIxDxhuZsPMrAehA2h2lWMqi5kZoW18qbv/KPbW\nbOCz0fJngXs7O7aOcPep7r6Xuw8l/Hs85O6foU7zAXD3l4EXzGz/aNPxwBLqN6fngaPMrE/0+3c8\noe+sXvOJay+H2cAEM+tpZsOA4cD/VCG+spnZOEIT7ynuvj
72Vsdzcvcu+QI+Qhg58CxwSbXj6UD8\nxxIum58CFkSvjwC7EEZ5PAP8CRhY7Vg7kNtY4HfRcl3nAxwOzI/+ne4Bdq7nnIDvAU8Di4BfAD3r\nLR9gFqEPZhPhKvDsYjkAl0TfE8uAE6sdfxk5NRP6KnLfD9O3Nyfd6S0iIol01SYpEREpkwqGiIgk\nooIhIiKJqGCIiEgiKhgiIpKICoYIYGZuZtfG1r9uZt8tccwppWY6NrOxuZl3C7y30sx27VDAIlWg\ngiESvA2cVs4XuLvPdvcfpBhTu6LJ/0Q6lQqGSNBKeIzlhflvmNluZnaXmc2LXsdE2z9nZtdHy/ua\n2d/MbKGZXW5mLbFT9Is9E+O26C7pnG9Ex/yPme0XnWuomT0UPcfgQTPbO9o+08ymm9ljwNVmNsbM\nFkSvJ82sf0r/bUQAFQyRuGnAp6NpyeN+Alzn7u8BPs7W06/H9/mJux9CuNM27ghgCuHZK+8izJuV\n80Z0zPWE2XoBfgrc6uE5BrcB/xnbfy/gaHf/KvB14Fx3Pxx4H/CvcpIVKZcKhkjEw2y/Pyc8JCju\ng8D1ZraAMA/PgGiW4Lj3AndGy7/Ke+9/3H2Vu28hTNEwNPberNjP98bOlTvHLwjTwOTc6e6bo+VH\ngR+Z2fnATt727AORVKhgiGztx4R5ePrGtnUDjnL3w6PXYHdvKXx4QW/HljcTpj3P8XaW27PunZ1D\n/8kXgN7Ao2Z2QBkxiZRNBUMkxt1fA+4gFI2cB4Cv5FbM7PACh/6N0FwFYbbdpE6P/fxrtPyX2Dk+\nDTxS6EAz29fdF7r7VYQZmFUwJFUqGCLbuhaIj5Y6HxgVdUIvASYXOGYK8FUzewrYD3gj4WftHB1z\nAW0d7l8Bzoq2nxG9V8gUM1sU7beJtieqiaRCs9WKVICZ9QH+5e5uZhOAie5eV8+JFylFY7lFKmMk\noWPcgNcJz1AWyRRdYYiISCLqwxARkURUMEREJBEVDBERSUQFQ0REElHBEBGRRFQwREQkkf8DQLsG\ndgYLuisAAAAASUVORK5CYII=\n",
      "text/plain": [
       "<matplotlib.figure.Figure at 0x10e4e6ba8>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "fig,ax = plt.subplots()\n",
    "ax.plot([1+4*i for i in range(0,30)],accuracies,'r',linewidth=2)\n",
    "ax.set_ylabel('Accuracy')\n",
    "ax.set_xlabel('Neighbors')\n",
    "\n",
    "plt.grid()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Predicting...\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "0.82511210762331844"
      ]
     },
     "execution_count": 27,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "highestAcc = max(accuracies)\n",
    "bestK = accuracies.index(highestAcc)*4 + 1\n",
    "\n",
    "knn.fit(X_train, Y_train, k=bestK, distance='euclidean')\n",
    "result = knn.predict(X_test)\n",
    "score = metrics.accuracy_score(y_pred = result, y_true = Y_test)\n",
    "score"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "             precision    recall  f1-score   support\n",
      "\n",
      "          0       0.93      0.81      0.87       156\n",
      "          1       0.66      0.87      0.75        67\n",
      "\n",
      "avg / total       0.85      0.83      0.83       223\n",
      "\n"
     ]
    }
   ],
   "source": [
    "print(metrics.classification_report(y_true=Y_test, y_pred=result))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.2"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
