{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Phase 3 Weighted Bagging"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "C:\\Users\\zake7\\Anaconda3\\lib\\site-packages\\gensim\\utils.py:1197: UserWarning: detected Windows; aliasing chunkize to chunkize_serial\n",
      "  warnings.warn(\"detected Windows; aliasing chunkize to chunkize_serial\")\n",
      "Using TensorFlow backend.\n",
      "C:\\Users\\zake7\\Anaconda3\\lib\\site-packages\\fuzzywuzzy\\fuzz.py:35: UserWarning: Using slow pure-python SequenceMatcher. Install python-Levenshtein to remove this warning\n",
      "  warnings.warn('Using slow pure-python SequenceMatcher. Install python-Levenshtein to remove this warning')\n"
     ]
    }
   ],
   "source": [
    "########################################\n",
    "## Imports (deduplicated: stdlib -> third-party -> project-local)\n",
    "########################################\n",
    "import os\n",
    "import re\n",
    "import csv\n",
    "import sys\n",
    "import codecs\n",
    "import operator\n",
    "import itertools\n",
    "from os import listdir\n",
    "from os.path import isfile, join\n",
    "from string import punctuation\n",
    "from collections import Counter\n",
    "\n",
    "import numpy as np\n",
    "import pandas as pd\n",
    "import gensim\n",
    "import seaborn as sns\n",
    "import lightgbm as lgb\n",
    "import matplotlib.pyplot as plt\n",
    "%matplotlib inline\n",
    "\n",
    "from tqdm import tqdm\n",
    "from fuzzywuzzy import fuzz\n",
    "from nltk import ngrams, word_tokenize\n",
    "from nltk.corpus import stopwords\n",
    "from scipy.stats import skew, kurtosis\n",
    "from scipy.spatial.distance import cosine, cityblock, jaccard, canberra, euclidean, minkowski, braycurtis\n",
    "from sklearn.model_selection import train_test_split, KFold\n",
    "from keras.preprocessing.text import Tokenizer\n",
    "from keras.preprocessing.sequence import pad_sequences\n",
    "from keras.utils import to_categorical\n",
    "\n",
    "from iwillwin.trainer.supervised_trainer import KerasModelTrainer\n",
    "from iwillwin.data_utils.data_helpers import DataTransformer, DataLoader\n",
    "from iwillwin.data_utils.feature_engineering import FeatureCreator\n",
    "from iwillwin.config import dataset_config\n",
    "\n",
    "# Seed NumPy's global RNG last, after all imports, so the RNG state at the end\n",
    "# of this cell matches the original notebook (the seed call was its last\n",
    "# effectful statement before only already-cached re-imports).\n",
    "np.random.seed(1337)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Building prefix dict from the default dictionary ...\n",
      "Dumping model to file cache C:\\Users\\zake7\\AppData\\Local\\Temp\\jieba.cache\n",
      "Loading model cost 0.848 seconds.\n",
      "Prefix dict has been built succesfully.\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[DataHelper] Apply normalization on value-type columns\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "C:\\Users\\zake7\\Anaconda3\\lib\\site-packages\\sklearn\\utils\\validation.py:475: DataConversionWarning: Data with input dtype int64 was converted to float64 by MinMaxScaler.\n",
      "  warnings.warn(msg, DataConversionWarning)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Doing preprocessing...\n",
      "Transforming words to indices...\n",
      "Shape of data tensor: (320552, 50) (320552, 50)\n",
      "Shape of label tensor: (320552,)\n",
      "Preprocessed.\n",
      "Number of unique words 83265\n"
     ]
    }
   ],
   "source": [
    "# Vocabulary cap and padded-sequence length for the word-level tokenizer.\n",
    "NB_WORDS, MAX_SEQUENCE_LENGTH = 50000, 50\n",
    "# DataTransformer is project-local (iwillwin.data_utils); per the logged output it\n",
    "# applies min-max normalization to value columns -- confirm details in data_helpers.\n",
    "data_transformer = DataTransformer(max_num_words=NB_WORDS, max_sequence_length=MAX_SEQUENCE_LENGTH, char_level=False,\n",
    "                                   normalization=True, features_processed=True)\n",
    "# dual=False -> single input layout; returns train/test tensor tuples plus labels.\n",
    "trains_nns, tests_nns, labels = data_transformer.prepare_data(dual=False)\n",
    "print(\"Number of unique words\", len(data_transformer.tokenizer.index_docs))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Index 2 of the prepared tensors holds the hand-crafted meta features\n",
    "# (indices 0/1 are presumably the two padded title sequences -- confirm upstream).\n",
    "trains_meta = trains_nns[2]\n",
    "tests_meta = tests_nns[2]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Raw competition data, path relative to the notebook directory; the title2_zh\n",
    "# column is used below for the rumor-keyword trick features.\n",
    "train_df = pd.read_csv('../data/dataset/train.csv')\n",
    "test_df = pd.read_csv('../data/dataset/test.csv')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 37,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "nan <class 'float'>\n",
      "nan <class 'float'>\n",
      "nan <class 'float'>\n",
      "nan <class 'float'>\n",
      "nan <class 'float'>\n",
      "nan <class 'float'>\n",
      "nan <class 'float'>\n",
      "nan <class 'float'>\n"
     ]
    }
   ],
   "source": [
    "# Chinese keywords that signal a rumor-refutation headline\n",
    "# ('debunk', 'rumor', 'do not spread', 'it is fake').\n",
    "rumor_words = ['辟谣', '谣言', '勿传', '假的']\n",
    "\n",
    "def is_rumor(text):\n",
    "    \"\"\"Return 1 if `text` contains any rumor keyword, else 0.\n",
    "\n",
    "    Non-string input (NaN floats from pandas) is printed for inspection\n",
    "    and treated as 0, matching the recorded cell output.\n",
    "    \"\"\"\n",
    "    if not isinstance(text, str):\n",
    "        print(text, type(text))\n",
    "        return 0\n",
    "    return int(any(word in text for word in rumor_words))\n",
    "\n",
    "def has_split_symbol(text):\n",
    "    \"\"\"Return 1 if `text` contains the '|' separator, else 0 (non-str -> 0).\"\"\"\n",
    "    if not isinstance(text, str):\n",
    "        return 0\n",
    "    return int('|' in text)\n",
    "\n",
    "for df in [train_df, test_df]:\n",
    "    df['has_|'] = df['title2_zh'].apply(has_split_symbol)\n",
    "    df['has_rumor_words'] = df['title2_zh'].apply(is_rumor)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 38,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Append the binary rumor-word flag as one extra column of the meta features.\n",
    "train_has_rumor = train_df.has_rumor_words.values\n",
    "test_has_rumor = test_df.has_rumor_words.values\n",
    "\n",
    "# NOTE(review): trains_nns[2]/tests_nns[2] are the same arrays already bound to\n",
    "# trains_meta/tests_meta above; consider reusing those names for clarity.\n",
    "trick_trains_features = np.concatenate((trains_nns[2], train_has_rumor.reshape((-1, 1))), axis=1)\n",
    "trick_tests_features = np.concatenate((tests_nns[2], test_has_rumor.reshape((-1, 1))), axis=1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 39,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Out-of-fold (train) predictions and their matching test predictions share a\n",
    "# file name except for the '-Train' tag, so after sorting, oofs[i] pairs with preds[i].\n",
    "oof_file_names = sorted([f for f in listdir('../data/pseudo/oofs/') if isfile(join('../data/pseudo/oofs/', f)) and f != '.gitkeep'])\n",
    "preds_file_names = [name.replace('-Train', '') for name in oof_file_names]\n",
    "\n",
    "oofs = []\n",
    "preds = []\n",
    "for name in oof_file_names:\n",
    "    oofs.append(pd.read_csv('../data/pseudo/oofs/' + name))\n",
    "for name in preds_file_names:\n",
    "    preds.append(pd.read_csv('../data/pseudo/output/' + name))    "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 40,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0 3Embedding-DecomposalbeAttention-NoMeta-ClassWeighted-NoEM-Train-L0.857551-NB5000.csv\n",
      "1 P3Embedding-3LayersDenseCNN42-NoDrop-NoClassWeighted-withEM-Train-L0.297362-NB5000.csv\n",
      "2 P3Embedding-3LayersDenseRNN42-Drop01-NoMeta-NoClassWeighted-WithEM-Train-L0.292235-NB5000.csv\n",
      "3 P3Embedding-ESIM-Drop01-NoMeta-NoClassWeighted-NoEM-Train-L0.283131-NB5000.csv\n",
      "4 PS3Embedding-3LayersDenseCNN42-NoDrop-NoClassWeighted-withEM-Train-L0.853793-NB5000.csv\n",
      "5 PS3Embedding-3LayersDenseRNN42-Drop01-NoMeta-NoClassWeighted-WithEM-Train-L0.853764-NB5000.csv\n",
      "6 PS3Embedding-ESIM-Drop01-NoMeta-NoClassWeighted-NoEM-Train-L0.863459-NB5000.csv\n",
      "7 PSWordSGNS-DAttn-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.861867-NB100000.csv\n",
      "8 PSWordSGNS-DenseCNN5Layers-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.851927-NB100000.csv\n",
      "9 PSWordSGNS-DenseCNN5Layers-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.855071-NB100000.csv\n",
      "10 PSWordSGNS-ESIM-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.860951-NB100000.csv\n",
      "11 PSWordTC-DAttn-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.861878-NB100000.csv\n",
      "12 PSWordTC-DenseRNN-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.862805-NB100000.csv\n",
      "13 PSWordTC-ESIM-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.863699-NB100000.csv\n",
      "14 PWordSGNS-DenseCNN5Layers-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.852129-NB100000.csv\n",
      "15 PWordSGNS-ESIM-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.310416-NB100000.csv\n",
      "16 PWordTC-DenseCNN5Layers-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.847205-NB100000.csv\n",
      "17 PWordTC-DenseRNN-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.308049-NB100000.csv\n",
      "18 PWordTC-ESIM-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.306949-NB100000.csv\n"
     ]
    }
   ],
   "source": [
    "for i, name in enumerate(oof_file_names):\n",
    "    print(i, name)\n",
    "    \n",
    "# Stack each base model's three class probabilities into flat feature frames\n",
    "# (one column per model per class) for the level-2 ensemble.\n",
    "trains = pd.DataFrame()\n",
    "tests = pd.DataFrame()\n",
    "\n",
    "for i in range(len(oof_file_names)):\n",
    "    for label_type in ['agreed', 'disagreed', 'unrelated']:\n",
    "        # NOTE(review): train columns are 'oofs_{i}_{label}' but test columns are\n",
    "        # 'oofs_pred{i}_{label}' -- downstream code must not rely on matching names.\n",
    "        trains['oofs_{}_{}'.format(i, label_type)] = oofs[i][label_type].values\n",
    "        tests['oofs_pred{}_{}'.format(i, label_type)] = preds[i][label_type].values"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 41,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "['3Embedding-DecomposalbeAttention-NoMeta-ClassWeighted-NoEM-Train-L0.857551-NB5000.csv',\n",
       " 'P3Embedding-3LayersDenseCNN42-NoDrop-NoClassWeighted-withEM-Train-L0.297362-NB5000.csv',\n",
       " 'P3Embedding-3LayersDenseRNN42-Drop01-NoMeta-NoClassWeighted-WithEM-Train-L0.292235-NB5000.csv',\n",
       " 'P3Embedding-ESIM-Drop01-NoMeta-NoClassWeighted-NoEM-Train-L0.283131-NB5000.csv',\n",
       " 'PS3Embedding-3LayersDenseCNN42-NoDrop-NoClassWeighted-withEM-Train-L0.853793-NB5000.csv',\n",
       " 'PS3Embedding-3LayersDenseRNN42-Drop01-NoMeta-NoClassWeighted-WithEM-Train-L0.853764-NB5000.csv',\n",
       " 'PS3Embedding-ESIM-Drop01-NoMeta-NoClassWeighted-NoEM-Train-L0.863459-NB5000.csv',\n",
       " 'PSWordSGNS-DAttn-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.861867-NB100000.csv',\n",
       " 'PSWordSGNS-DenseCNN5Layers-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.851927-NB100000.csv',\n",
       " 'PSWordSGNS-DenseCNN5Layers-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.855071-NB100000.csv',\n",
       " 'PSWordSGNS-ESIM-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.860951-NB100000.csv',\n",
       " 'PSWordTC-DAttn-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.861878-NB100000.csv',\n",
       " 'PSWordTC-DenseRNN-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.862805-NB100000.csv',\n",
       " 'PSWordTC-ESIM-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.863699-NB100000.csv',\n",
       " 'PWordSGNS-DenseCNN5Layers-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.852129-NB100000.csv',\n",
       " 'PWordSGNS-ESIM-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.310416-NB100000.csv',\n",
       " 'PWordTC-DenseCNN5Layers-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.847205-NB100000.csv',\n",
       " 'PWordTC-DenseRNN-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.308049-NB100000.csv',\n",
       " 'PWordTC-ESIM-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.306949-NB100000.csv']"
      ]
     },
     "execution_count": 41,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Sanity check: display the sorted OOF file names (rich repr of the list).\n",
    "oof_file_names"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 42,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Collect per-class probabilities from every base model into one frame per\n",
    "# class, used for the correlation analysis in the next cell.\n",
    "unrelated = pd.DataFrame()\n",
    "agreeds = pd.DataFrame()\n",
    "disagreeds = pd.DataFrame()\n",
    "\n",
    "#check_oofs = True\n",
    "# Toggle: True -> inspect out-of-fold (train) predictions; False -> test predictions.\n",
    "check_oofs = False\n",
    "\n",
    "\n",
    "if check_oofs:\n",
    "    for i, oof in enumerate(oofs):\n",
    "        agreeds['oofs_agreed_{}'.format(i)] = oofs[i]['agreed'].values\n",
    "        unrelated['oofs_unrelated_{}'.format(i)] = oofs[i]['unrelated'].values\n",
    "        disagreeds['oofs_disagreeds_{}'.format(i)] = oofs[i]['disagreed'].values\n",
    "else:\n",
    "    # NOTE(review): the column prefix stays 'oofs_' even though these values come\n",
    "    # from `preds` (test-set predictions); loop variable `oof` is unused here.\n",
    "    for i, oof in enumerate(oofs):\n",
    "        agreeds['oofs_agreed_{}'.format(i)] = preds[i]['agreed'].values\n",
    "        unrelated['oofs_unrelated_{}'.format(i)] = preds[i]['unrelated'].values\n",
    "        disagreeds['oofs_disagreeds_{}'.format(i)] = preds[i]['disagreed'].values  "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 43,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>oofs_agreed_0</th>\n",
       "      <th>oofs_agreed_1</th>\n",
       "      <th>oofs_agreed_2</th>\n",
       "      <th>oofs_agreed_3</th>\n",
       "      <th>oofs_agreed_4</th>\n",
       "      <th>oofs_agreed_5</th>\n",
       "      <th>oofs_agreed_6</th>\n",
       "      <th>oofs_agreed_7</th>\n",
       "      <th>oofs_agreed_8</th>\n",
       "      <th>oofs_agreed_9</th>\n",
       "      <th>oofs_agreed_10</th>\n",
       "      <th>oofs_agreed_11</th>\n",
       "      <th>oofs_agreed_12</th>\n",
       "      <th>oofs_agreed_13</th>\n",
       "      <th>oofs_agreed_14</th>\n",
       "      <th>oofs_agreed_15</th>\n",
       "      <th>oofs_agreed_16</th>\n",
       "      <th>oofs_agreed_17</th>\n",
       "      <th>oofs_agreed_18</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_0</th>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.985454</td>\n",
       "      <td>0.988323</td>\n",
       "      <td>0.986448</td>\n",
       "      <td>0.984380</td>\n",
       "      <td>0.987315</td>\n",
       "      <td>0.985907</td>\n",
       "      <td>0.973378</td>\n",
       "      <td>0.975002</td>\n",
       "      <td>0.977427</td>\n",
       "      <td>0.975342</td>\n",
       "      <td>0.976909</td>\n",
       "      <td>0.976050</td>\n",
       "      <td>0.976170</td>\n",
       "      <td>0.976952</td>\n",
       "      <td>0.976174</td>\n",
       "      <td>0.975576</td>\n",
       "      <td>0.975719</td>\n",
       "      <td>0.976823</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_1</th>\n",
       "      <td>0.985454</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.988241</td>\n",
       "      <td>0.986295</td>\n",
       "      <td>0.995884</td>\n",
       "      <td>0.988644</td>\n",
       "      <td>0.985872</td>\n",
       "      <td>0.976543</td>\n",
       "      <td>0.978513</td>\n",
       "      <td>0.977921</td>\n",
       "      <td>0.978879</td>\n",
       "      <td>0.974114</td>\n",
       "      <td>0.978497</td>\n",
       "      <td>0.978545</td>\n",
       "      <td>0.977649</td>\n",
       "      <td>0.979854</td>\n",
       "      <td>0.979520</td>\n",
       "      <td>0.978561</td>\n",
       "      <td>0.979853</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_2</th>\n",
       "      <td>0.988323</td>\n",
       "      <td>0.988241</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.987435</td>\n",
       "      <td>0.988193</td>\n",
       "      <td>0.996147</td>\n",
       "      <td>0.987102</td>\n",
       "      <td>0.970575</td>\n",
       "      <td>0.976039</td>\n",
       "      <td>0.976866</td>\n",
       "      <td>0.977199</td>\n",
       "      <td>0.973728</td>\n",
       "      <td>0.978022</td>\n",
       "      <td>0.977212</td>\n",
       "      <td>0.976467</td>\n",
       "      <td>0.977935</td>\n",
       "      <td>0.976717</td>\n",
       "      <td>0.977700</td>\n",
       "      <td>0.977855</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_3</th>\n",
       "      <td>0.986448</td>\n",
       "      <td>0.986295</td>\n",
       "      <td>0.987435</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.984308</td>\n",
       "      <td>0.986038</td>\n",
       "      <td>0.996268</td>\n",
       "      <td>0.976276</td>\n",
       "      <td>0.979169</td>\n",
       "      <td>0.978624</td>\n",
       "      <td>0.983136</td>\n",
       "      <td>0.979642</td>\n",
       "      <td>0.983276</td>\n",
       "      <td>0.984887</td>\n",
       "      <td>0.978003</td>\n",
       "      <td>0.984594</td>\n",
       "      <td>0.979907</td>\n",
       "      <td>0.983532</td>\n",
       "      <td>0.985561</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_4</th>\n",
       "      <td>0.984380</td>\n",
       "      <td>0.995884</td>\n",
       "      <td>0.988193</td>\n",
       "      <td>0.984308</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.988589</td>\n",
       "      <td>0.984010</td>\n",
       "      <td>0.974411</td>\n",
       "      <td>0.977064</td>\n",
       "      <td>0.976446</td>\n",
       "      <td>0.976906</td>\n",
       "      <td>0.971661</td>\n",
       "      <td>0.976735</td>\n",
       "      <td>0.976505</td>\n",
       "      <td>0.976157</td>\n",
       "      <td>0.977627</td>\n",
       "      <td>0.977743</td>\n",
       "      <td>0.976696</td>\n",
       "      <td>0.977415</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_5</th>\n",
       "      <td>0.987315</td>\n",
       "      <td>0.988644</td>\n",
       "      <td>0.996147</td>\n",
       "      <td>0.986038</td>\n",
       "      <td>0.988589</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.986175</td>\n",
       "      <td>0.971035</td>\n",
       "      <td>0.974898</td>\n",
       "      <td>0.975453</td>\n",
       "      <td>0.976168</td>\n",
       "      <td>0.971536</td>\n",
       "      <td>0.976450</td>\n",
       "      <td>0.975633</td>\n",
       "      <td>0.974975</td>\n",
       "      <td>0.976913</td>\n",
       "      <td>0.975815</td>\n",
       "      <td>0.976077</td>\n",
       "      <td>0.976816</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_6</th>\n",
       "      <td>0.985907</td>\n",
       "      <td>0.985872</td>\n",
       "      <td>0.987102</td>\n",
       "      <td>0.996268</td>\n",
       "      <td>0.984010</td>\n",
       "      <td>0.986175</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.975417</td>\n",
       "      <td>0.977783</td>\n",
       "      <td>0.977023</td>\n",
       "      <td>0.981811</td>\n",
       "      <td>0.977934</td>\n",
       "      <td>0.981879</td>\n",
       "      <td>0.983655</td>\n",
       "      <td>0.976346</td>\n",
       "      <td>0.983055</td>\n",
       "      <td>0.978560</td>\n",
       "      <td>0.982419</td>\n",
       "      <td>0.984168</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_7</th>\n",
       "      <td>0.973378</td>\n",
       "      <td>0.976543</td>\n",
       "      <td>0.970575</td>\n",
       "      <td>0.976276</td>\n",
       "      <td>0.974411</td>\n",
       "      <td>0.971035</td>\n",
       "      <td>0.975417</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.988611</td>\n",
       "      <td>0.982953</td>\n",
       "      <td>0.986930</td>\n",
       "      <td>0.983626</td>\n",
       "      <td>0.979390</td>\n",
       "      <td>0.982267</td>\n",
       "      <td>0.982601</td>\n",
       "      <td>0.986865</td>\n",
       "      <td>0.989011</td>\n",
       "      <td>0.979822</td>\n",
       "      <td>0.983091</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_8</th>\n",
       "      <td>0.975002</td>\n",
       "      <td>0.978513</td>\n",
       "      <td>0.976039</td>\n",
       "      <td>0.979169</td>\n",
       "      <td>0.977064</td>\n",
       "      <td>0.974898</td>\n",
       "      <td>0.977783</td>\n",
       "      <td>0.988611</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.987310</td>\n",
       "      <td>0.990456</td>\n",
       "      <td>0.983518</td>\n",
       "      <td>0.984780</td>\n",
       "      <td>0.985321</td>\n",
       "      <td>0.987066</td>\n",
       "      <td>0.990289</td>\n",
       "      <td>0.997975</td>\n",
       "      <td>0.984720</td>\n",
       "      <td>0.985544</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_9</th>\n",
       "      <td>0.977427</td>\n",
       "      <td>0.977921</td>\n",
       "      <td>0.976866</td>\n",
       "      <td>0.978624</td>\n",
       "      <td>0.976446</td>\n",
       "      <td>0.975453</td>\n",
       "      <td>0.977023</td>\n",
       "      <td>0.982953</td>\n",
       "      <td>0.987310</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.984884</td>\n",
       "      <td>0.989911</td>\n",
       "      <td>0.989481</td>\n",
       "      <td>0.987805</td>\n",
       "      <td>0.998649</td>\n",
       "      <td>0.985065</td>\n",
       "      <td>0.987453</td>\n",
       "      <td>0.989079</td>\n",
       "      <td>0.988042</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_10</th>\n",
       "      <td>0.975342</td>\n",
       "      <td>0.978879</td>\n",
       "      <td>0.977199</td>\n",
       "      <td>0.983136</td>\n",
       "      <td>0.976906</td>\n",
       "      <td>0.976168</td>\n",
       "      <td>0.981811</td>\n",
       "      <td>0.986930</td>\n",
       "      <td>0.990456</td>\n",
       "      <td>0.984884</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.983689</td>\n",
       "      <td>0.987243</td>\n",
       "      <td>0.989598</td>\n",
       "      <td>0.984538</td>\n",
       "      <td>0.995895</td>\n",
       "      <td>0.991031</td>\n",
       "      <td>0.987307</td>\n",
       "      <td>0.990067</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_11</th>\n",
       "      <td>0.976909</td>\n",
       "      <td>0.974114</td>\n",
       "      <td>0.973728</td>\n",
       "      <td>0.979642</td>\n",
       "      <td>0.971661</td>\n",
       "      <td>0.971536</td>\n",
       "      <td>0.977934</td>\n",
       "      <td>0.983626</td>\n",
       "      <td>0.983518</td>\n",
       "      <td>0.989911</td>\n",
       "      <td>0.983689</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.986204</td>\n",
       "      <td>0.987806</td>\n",
       "      <td>0.989436</td>\n",
       "      <td>0.984225</td>\n",
       "      <td>0.983439</td>\n",
       "      <td>0.986123</td>\n",
       "      <td>0.987201</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_12</th>\n",
       "      <td>0.976050</td>\n",
       "      <td>0.978497</td>\n",
       "      <td>0.978022</td>\n",
       "      <td>0.983276</td>\n",
       "      <td>0.976735</td>\n",
       "      <td>0.976450</td>\n",
       "      <td>0.981879</td>\n",
       "      <td>0.979390</td>\n",
       "      <td>0.984780</td>\n",
       "      <td>0.989481</td>\n",
       "      <td>0.987243</td>\n",
       "      <td>0.986204</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.991436</td>\n",
       "      <td>0.989118</td>\n",
       "      <td>0.987471</td>\n",
       "      <td>0.985098</td>\n",
       "      <td>0.998035</td>\n",
       "      <td>0.991292</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_13</th>\n",
       "      <td>0.976170</td>\n",
       "      <td>0.978545</td>\n",
       "      <td>0.977212</td>\n",
       "      <td>0.984887</td>\n",
       "      <td>0.976505</td>\n",
       "      <td>0.975633</td>\n",
       "      <td>0.983655</td>\n",
       "      <td>0.982267</td>\n",
       "      <td>0.985321</td>\n",
       "      <td>0.987805</td>\n",
       "      <td>0.989598</td>\n",
       "      <td>0.987806</td>\n",
       "      <td>0.991436</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.987627</td>\n",
       "      <td>0.990288</td>\n",
       "      <td>0.985720</td>\n",
       "      <td>0.991746</td>\n",
       "      <td>0.996646</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_14</th>\n",
       "      <td>0.976952</td>\n",
       "      <td>0.977649</td>\n",
       "      <td>0.976467</td>\n",
       "      <td>0.978003</td>\n",
       "      <td>0.976157</td>\n",
       "      <td>0.974975</td>\n",
       "      <td>0.976346</td>\n",
       "      <td>0.982601</td>\n",
       "      <td>0.987066</td>\n",
       "      <td>0.998649</td>\n",
       "      <td>0.984538</td>\n",
       "      <td>0.989436</td>\n",
       "      <td>0.989118</td>\n",
       "      <td>0.987627</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.984773</td>\n",
       "      <td>0.987139</td>\n",
       "      <td>0.988966</td>\n",
       "      <td>0.987761</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_15</th>\n",
       "      <td>0.976174</td>\n",
       "      <td>0.979854</td>\n",
       "      <td>0.977935</td>\n",
       "      <td>0.984594</td>\n",
       "      <td>0.977627</td>\n",
       "      <td>0.976913</td>\n",
       "      <td>0.983055</td>\n",
       "      <td>0.986865</td>\n",
       "      <td>0.990289</td>\n",
       "      <td>0.985065</td>\n",
       "      <td>0.995895</td>\n",
       "      <td>0.984225</td>\n",
       "      <td>0.987471</td>\n",
       "      <td>0.990288</td>\n",
       "      <td>0.984773</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.991009</td>\n",
       "      <td>0.987715</td>\n",
       "      <td>0.990837</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_16</th>\n",
       "      <td>0.975576</td>\n",
       "      <td>0.979520</td>\n",
       "      <td>0.976717</td>\n",
       "      <td>0.979907</td>\n",
       "      <td>0.977743</td>\n",
       "      <td>0.975815</td>\n",
       "      <td>0.978560</td>\n",
       "      <td>0.989011</td>\n",
       "      <td>0.997975</td>\n",
       "      <td>0.987453</td>\n",
       "      <td>0.991031</td>\n",
       "      <td>0.983439</td>\n",
       "      <td>0.985098</td>\n",
       "      <td>0.985720</td>\n",
       "      <td>0.987139</td>\n",
       "      <td>0.991009</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.985160</td>\n",
       "      <td>0.986302</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_17</th>\n",
       "      <td>0.975719</td>\n",
       "      <td>0.978561</td>\n",
       "      <td>0.977700</td>\n",
       "      <td>0.983532</td>\n",
       "      <td>0.976696</td>\n",
       "      <td>0.976077</td>\n",
       "      <td>0.982419</td>\n",
       "      <td>0.979822</td>\n",
       "      <td>0.984720</td>\n",
       "      <td>0.989079</td>\n",
       "      <td>0.987307</td>\n",
       "      <td>0.986123</td>\n",
       "      <td>0.998035</td>\n",
       "      <td>0.991746</td>\n",
       "      <td>0.988966</td>\n",
       "      <td>0.987715</td>\n",
       "      <td>0.985160</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.991585</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_18</th>\n",
       "      <td>0.976823</td>\n",
       "      <td>0.979853</td>\n",
       "      <td>0.977855</td>\n",
       "      <td>0.985561</td>\n",
       "      <td>0.977415</td>\n",
       "      <td>0.976816</td>\n",
       "      <td>0.984168</td>\n",
       "      <td>0.983091</td>\n",
       "      <td>0.985544</td>\n",
       "      <td>0.988042</td>\n",
       "      <td>0.990067</td>\n",
       "      <td>0.987201</td>\n",
       "      <td>0.991292</td>\n",
       "      <td>0.996646</td>\n",
       "      <td>0.987761</td>\n",
       "      <td>0.990837</td>\n",
       "      <td>0.986302</td>\n",
       "      <td>0.991585</td>\n",
       "      <td>1.000000</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "                oofs_agreed_0  oofs_agreed_1  oofs_agreed_2  oofs_agreed_3  \\\n",
       "oofs_agreed_0        1.000000       0.985454       0.988323       0.986448   \n",
       "oofs_agreed_1        0.985454       1.000000       0.988241       0.986295   \n",
       "oofs_agreed_2        0.988323       0.988241       1.000000       0.987435   \n",
       "oofs_agreed_3        0.986448       0.986295       0.987435       1.000000   \n",
       "oofs_agreed_4        0.984380       0.995884       0.988193       0.984308   \n",
       "oofs_agreed_5        0.987315       0.988644       0.996147       0.986038   \n",
       "oofs_agreed_6        0.985907       0.985872       0.987102       0.996268   \n",
       "oofs_agreed_7        0.973378       0.976543       0.970575       0.976276   \n",
       "oofs_agreed_8        0.975002       0.978513       0.976039       0.979169   \n",
       "oofs_agreed_9        0.977427       0.977921       0.976866       0.978624   \n",
       "oofs_agreed_10       0.975342       0.978879       0.977199       0.983136   \n",
       "oofs_agreed_11       0.976909       0.974114       0.973728       0.979642   \n",
       "oofs_agreed_12       0.976050       0.978497       0.978022       0.983276   \n",
       "oofs_agreed_13       0.976170       0.978545       0.977212       0.984887   \n",
       "oofs_agreed_14       0.976952       0.977649       0.976467       0.978003   \n",
       "oofs_agreed_15       0.976174       0.979854       0.977935       0.984594   \n",
       "oofs_agreed_16       0.975576       0.979520       0.976717       0.979907   \n",
       "oofs_agreed_17       0.975719       0.978561       0.977700       0.983532   \n",
       "oofs_agreed_18       0.976823       0.979853       0.977855       0.985561   \n",
       "\n",
       "                oofs_agreed_4  oofs_agreed_5  oofs_agreed_6  oofs_agreed_7  \\\n",
       "oofs_agreed_0        0.984380       0.987315       0.985907       0.973378   \n",
       "oofs_agreed_1        0.995884       0.988644       0.985872       0.976543   \n",
       "oofs_agreed_2        0.988193       0.996147       0.987102       0.970575   \n",
       "oofs_agreed_3        0.984308       0.986038       0.996268       0.976276   \n",
       "oofs_agreed_4        1.000000       0.988589       0.984010       0.974411   \n",
       "oofs_agreed_5        0.988589       1.000000       0.986175       0.971035   \n",
       "oofs_agreed_6        0.984010       0.986175       1.000000       0.975417   \n",
       "oofs_agreed_7        0.974411       0.971035       0.975417       1.000000   \n",
       "oofs_agreed_8        0.977064       0.974898       0.977783       0.988611   \n",
       "oofs_agreed_9        0.976446       0.975453       0.977023       0.982953   \n",
       "oofs_agreed_10       0.976906       0.976168       0.981811       0.986930   \n",
       "oofs_agreed_11       0.971661       0.971536       0.977934       0.983626   \n",
       "oofs_agreed_12       0.976735       0.976450       0.981879       0.979390   \n",
       "oofs_agreed_13       0.976505       0.975633       0.983655       0.982267   \n",
       "oofs_agreed_14       0.976157       0.974975       0.976346       0.982601   \n",
       "oofs_agreed_15       0.977627       0.976913       0.983055       0.986865   \n",
       "oofs_agreed_16       0.977743       0.975815       0.978560       0.989011   \n",
       "oofs_agreed_17       0.976696       0.976077       0.982419       0.979822   \n",
       "oofs_agreed_18       0.977415       0.976816       0.984168       0.983091   \n",
       "\n",
       "                oofs_agreed_8  oofs_agreed_9  oofs_agreed_10  oofs_agreed_11  \\\n",
       "oofs_agreed_0        0.975002       0.977427        0.975342        0.976909   \n",
       "oofs_agreed_1        0.978513       0.977921        0.978879        0.974114   \n",
       "oofs_agreed_2        0.976039       0.976866        0.977199        0.973728   \n",
       "oofs_agreed_3        0.979169       0.978624        0.983136        0.979642   \n",
       "oofs_agreed_4        0.977064       0.976446        0.976906        0.971661   \n",
       "oofs_agreed_5        0.974898       0.975453        0.976168        0.971536   \n",
       "oofs_agreed_6        0.977783       0.977023        0.981811        0.977934   \n",
       "oofs_agreed_7        0.988611       0.982953        0.986930        0.983626   \n",
       "oofs_agreed_8        1.000000       0.987310        0.990456        0.983518   \n",
       "oofs_agreed_9        0.987310       1.000000        0.984884        0.989911   \n",
       "oofs_agreed_10       0.990456       0.984884        1.000000        0.983689   \n",
       "oofs_agreed_11       0.983518       0.989911        0.983689        1.000000   \n",
       "oofs_agreed_12       0.984780       0.989481        0.987243        0.986204   \n",
       "oofs_agreed_13       0.985321       0.987805        0.989598        0.987806   \n",
       "oofs_agreed_14       0.987066       0.998649        0.984538        0.989436   \n",
       "oofs_agreed_15       0.990289       0.985065        0.995895        0.984225   \n",
       "oofs_agreed_16       0.997975       0.987453        0.991031        0.983439   \n",
       "oofs_agreed_17       0.984720       0.989079        0.987307        0.986123   \n",
       "oofs_agreed_18       0.985544       0.988042        0.990067        0.987201   \n",
       "\n",
       "                oofs_agreed_12  oofs_agreed_13  oofs_agreed_14  \\\n",
       "oofs_agreed_0         0.976050        0.976170        0.976952   \n",
       "oofs_agreed_1         0.978497        0.978545        0.977649   \n",
       "oofs_agreed_2         0.978022        0.977212        0.976467   \n",
       "oofs_agreed_3         0.983276        0.984887        0.978003   \n",
       "oofs_agreed_4         0.976735        0.976505        0.976157   \n",
       "oofs_agreed_5         0.976450        0.975633        0.974975   \n",
       "oofs_agreed_6         0.981879        0.983655        0.976346   \n",
       "oofs_agreed_7         0.979390        0.982267        0.982601   \n",
       "oofs_agreed_8         0.984780        0.985321        0.987066   \n",
       "oofs_agreed_9         0.989481        0.987805        0.998649   \n",
       "oofs_agreed_10        0.987243        0.989598        0.984538   \n",
       "oofs_agreed_11        0.986204        0.987806        0.989436   \n",
       "oofs_agreed_12        1.000000        0.991436        0.989118   \n",
       "oofs_agreed_13        0.991436        1.000000        0.987627   \n",
       "oofs_agreed_14        0.989118        0.987627        1.000000   \n",
       "oofs_agreed_15        0.987471        0.990288        0.984773   \n",
       "oofs_agreed_16        0.985098        0.985720        0.987139   \n",
       "oofs_agreed_17        0.998035        0.991746        0.988966   \n",
       "oofs_agreed_18        0.991292        0.996646        0.987761   \n",
       "\n",
       "                oofs_agreed_15  oofs_agreed_16  oofs_agreed_17  oofs_agreed_18  \n",
       "oofs_agreed_0         0.976174        0.975576        0.975719        0.976823  \n",
       "oofs_agreed_1         0.979854        0.979520        0.978561        0.979853  \n",
       "oofs_agreed_2         0.977935        0.976717        0.977700        0.977855  \n",
       "oofs_agreed_3         0.984594        0.979907        0.983532        0.985561  \n",
       "oofs_agreed_4         0.977627        0.977743        0.976696        0.977415  \n",
       "oofs_agreed_5         0.976913        0.975815        0.976077        0.976816  \n",
       "oofs_agreed_6         0.983055        0.978560        0.982419        0.984168  \n",
       "oofs_agreed_7         0.986865        0.989011        0.979822        0.983091  \n",
       "oofs_agreed_8         0.990289        0.997975        0.984720        0.985544  \n",
       "oofs_agreed_9         0.985065        0.987453        0.989079        0.988042  \n",
       "oofs_agreed_10        0.995895        0.991031        0.987307        0.990067  \n",
       "oofs_agreed_11        0.984225        0.983439        0.986123        0.987201  \n",
       "oofs_agreed_12        0.987471        0.985098        0.998035        0.991292  \n",
       "oofs_agreed_13        0.990288        0.985720        0.991746        0.996646  \n",
       "oofs_agreed_14        0.984773        0.987139        0.988966        0.987761  \n",
       "oofs_agreed_15        1.000000        0.991009        0.987715        0.990837  \n",
       "oofs_agreed_16        0.991009        1.000000        0.985160        0.986302  \n",
       "oofs_agreed_17        0.987715        0.985160        1.000000        0.991585  \n",
       "oofs_agreed_18        0.990837        0.986302        0.991585        1.000000  "
      ]
     },
     "execution_count": 43,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "agreeds.corr()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 44,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>oofs_disagreeds_0</th>\n",
       "      <th>oofs_disagreeds_1</th>\n",
       "      <th>oofs_disagreeds_2</th>\n",
       "      <th>oofs_disagreeds_3</th>\n",
       "      <th>oofs_disagreeds_4</th>\n",
       "      <th>oofs_disagreeds_5</th>\n",
       "      <th>oofs_disagreeds_6</th>\n",
       "      <th>oofs_disagreeds_7</th>\n",
       "      <th>oofs_disagreeds_8</th>\n",
       "      <th>oofs_disagreeds_9</th>\n",
       "      <th>oofs_disagreeds_10</th>\n",
       "      <th>oofs_disagreeds_11</th>\n",
       "      <th>oofs_disagreeds_12</th>\n",
       "      <th>oofs_disagreeds_13</th>\n",
       "      <th>oofs_disagreeds_14</th>\n",
       "      <th>oofs_disagreeds_15</th>\n",
       "      <th>oofs_disagreeds_16</th>\n",
       "      <th>oofs_disagreeds_17</th>\n",
       "      <th>oofs_disagreeds_18</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_0</th>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.956195</td>\n",
       "      <td>0.965381</td>\n",
       "      <td>0.967006</td>\n",
       "      <td>0.970607</td>\n",
       "      <td>0.973446</td>\n",
       "      <td>0.969427</td>\n",
       "      <td>0.947563</td>\n",
       "      <td>0.953924</td>\n",
       "      <td>0.953973</td>\n",
       "      <td>0.953546</td>\n",
       "      <td>0.955650</td>\n",
       "      <td>0.952962</td>\n",
       "      <td>0.952829</td>\n",
       "      <td>0.948303</td>\n",
       "      <td>0.949407</td>\n",
       "      <td>0.948455</td>\n",
       "      <td>0.949762</td>\n",
       "      <td>0.947914</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_1</th>\n",
       "      <td>0.956195</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.972663</td>\n",
       "      <td>0.969680</td>\n",
       "      <td>0.980875</td>\n",
       "      <td>0.961191</td>\n",
       "      <td>0.958854</td>\n",
       "      <td>0.940807</td>\n",
       "      <td>0.950593</td>\n",
       "      <td>0.941773</td>\n",
       "      <td>0.946255</td>\n",
       "      <td>0.942006</td>\n",
       "      <td>0.941758</td>\n",
       "      <td>0.944460</td>\n",
       "      <td>0.953687</td>\n",
       "      <td>0.960452</td>\n",
       "      <td>0.958069</td>\n",
       "      <td>0.955050</td>\n",
       "      <td>0.956554</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_2</th>\n",
       "      <td>0.965381</td>\n",
       "      <td>0.972663</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.973263</td>\n",
       "      <td>0.974988</td>\n",
       "      <td>0.984404</td>\n",
       "      <td>0.967257</td>\n",
       "      <td>0.938211</td>\n",
       "      <td>0.951614</td>\n",
       "      <td>0.948799</td>\n",
       "      <td>0.949472</td>\n",
       "      <td>0.945175</td>\n",
       "      <td>0.947688</td>\n",
       "      <td>0.950114</td>\n",
       "      <td>0.955320</td>\n",
       "      <td>0.955833</td>\n",
       "      <td>0.955834</td>\n",
       "      <td>0.956771</td>\n",
       "      <td>0.954623</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_3</th>\n",
       "      <td>0.967006</td>\n",
       "      <td>0.969680</td>\n",
       "      <td>0.973263</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.973872</td>\n",
       "      <td>0.970303</td>\n",
       "      <td>0.986330</td>\n",
       "      <td>0.955033</td>\n",
       "      <td>0.962220</td>\n",
       "      <td>0.955903</td>\n",
       "      <td>0.963340</td>\n",
       "      <td>0.957298</td>\n",
       "      <td>0.961192</td>\n",
       "      <td>0.963206</td>\n",
       "      <td>0.959059</td>\n",
       "      <td>0.969456</td>\n",
       "      <td>0.963931</td>\n",
       "      <td>0.967005</td>\n",
       "      <td>0.968558</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_4</th>\n",
       "      <td>0.970607</td>\n",
       "      <td>0.980875</td>\n",
       "      <td>0.974988</td>\n",
       "      <td>0.973872</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.976251</td>\n",
       "      <td>0.972630</td>\n",
       "      <td>0.948467</td>\n",
       "      <td>0.960164</td>\n",
       "      <td>0.957989</td>\n",
       "      <td>0.960040</td>\n",
       "      <td>0.955228</td>\n",
       "      <td>0.957282</td>\n",
       "      <td>0.957418</td>\n",
       "      <td>0.956558</td>\n",
       "      <td>0.959968</td>\n",
       "      <td>0.958002</td>\n",
       "      <td>0.959996</td>\n",
       "      <td>0.956790</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_5</th>\n",
       "      <td>0.973446</td>\n",
       "      <td>0.961191</td>\n",
       "      <td>0.984404</td>\n",
       "      <td>0.970303</td>\n",
       "      <td>0.976251</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.971418</td>\n",
       "      <td>0.938822</td>\n",
       "      <td>0.953536</td>\n",
       "      <td>0.955377</td>\n",
       "      <td>0.955229</td>\n",
       "      <td>0.950740</td>\n",
       "      <td>0.954774</td>\n",
       "      <td>0.956343</td>\n",
       "      <td>0.949685</td>\n",
       "      <td>0.951494</td>\n",
       "      <td>0.949397</td>\n",
       "      <td>0.953315</td>\n",
       "      <td>0.948816</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_6</th>\n",
       "      <td>0.969427</td>\n",
       "      <td>0.958854</td>\n",
       "      <td>0.967257</td>\n",
       "      <td>0.986330</td>\n",
       "      <td>0.972630</td>\n",
       "      <td>0.971418</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.954179</td>\n",
       "      <td>0.960763</td>\n",
       "      <td>0.957241</td>\n",
       "      <td>0.964684</td>\n",
       "      <td>0.959270</td>\n",
       "      <td>0.963397</td>\n",
       "      <td>0.965154</td>\n",
       "      <td>0.951198</td>\n",
       "      <td>0.961519</td>\n",
       "      <td>0.955037</td>\n",
       "      <td>0.960886</td>\n",
       "      <td>0.960835</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_7</th>\n",
       "      <td>0.947563</td>\n",
       "      <td>0.940807</td>\n",
       "      <td>0.938211</td>\n",
       "      <td>0.955033</td>\n",
       "      <td>0.948467</td>\n",
       "      <td>0.938822</td>\n",
       "      <td>0.954179</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.975049</td>\n",
       "      <td>0.961024</td>\n",
       "      <td>0.967412</td>\n",
       "      <td>0.970211</td>\n",
       "      <td>0.958116</td>\n",
       "      <td>0.962381</td>\n",
       "      <td>0.960166</td>\n",
       "      <td>0.969288</td>\n",
       "      <td>0.969771</td>\n",
       "      <td>0.959363</td>\n",
       "      <td>0.964948</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_8</th>\n",
       "      <td>0.953924</td>\n",
       "      <td>0.950593</td>\n",
       "      <td>0.951614</td>\n",
       "      <td>0.962220</td>\n",
       "      <td>0.960164</td>\n",
       "      <td>0.953536</td>\n",
       "      <td>0.960763</td>\n",
       "      <td>0.975049</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.976337</td>\n",
       "      <td>0.977972</td>\n",
       "      <td>0.971753</td>\n",
       "      <td>0.971154</td>\n",
       "      <td>0.973719</td>\n",
       "      <td>0.972987</td>\n",
       "      <td>0.978265</td>\n",
       "      <td>0.990127</td>\n",
       "      <td>0.972071</td>\n",
       "      <td>0.971182</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_9</th>\n",
       "      <td>0.953973</td>\n",
       "      <td>0.941773</td>\n",
       "      <td>0.948799</td>\n",
       "      <td>0.955903</td>\n",
       "      <td>0.957989</td>\n",
       "      <td>0.955377</td>\n",
       "      <td>0.957241</td>\n",
       "      <td>0.961024</td>\n",
       "      <td>0.976337</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.972073</td>\n",
       "      <td>0.978640</td>\n",
       "      <td>0.978412</td>\n",
       "      <td>0.978475</td>\n",
       "      <td>0.987397</td>\n",
       "      <td>0.966143</td>\n",
       "      <td>0.969127</td>\n",
       "      <td>0.976464</td>\n",
       "      <td>0.970646</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_10</th>\n",
       "      <td>0.953546</td>\n",
       "      <td>0.946255</td>\n",
       "      <td>0.949472</td>\n",
       "      <td>0.963340</td>\n",
       "      <td>0.960040</td>\n",
       "      <td>0.955229</td>\n",
       "      <td>0.964684</td>\n",
       "      <td>0.967412</td>\n",
       "      <td>0.977972</td>\n",
       "      <td>0.972073</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.970571</td>\n",
       "      <td>0.973727</td>\n",
       "      <td>0.978318</td>\n",
       "      <td>0.963091</td>\n",
       "      <td>0.981756</td>\n",
       "      <td>0.971032</td>\n",
       "      <td>0.969401</td>\n",
       "      <td>0.970960</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_11</th>\n",
       "      <td>0.955650</td>\n",
       "      <td>0.942006</td>\n",
       "      <td>0.945175</td>\n",
       "      <td>0.957298</td>\n",
       "      <td>0.955228</td>\n",
       "      <td>0.950740</td>\n",
       "      <td>0.959270</td>\n",
       "      <td>0.970211</td>\n",
       "      <td>0.971753</td>\n",
       "      <td>0.978640</td>\n",
       "      <td>0.970571</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.975214</td>\n",
       "      <td>0.977735</td>\n",
       "      <td>0.968885</td>\n",
       "      <td>0.965114</td>\n",
       "      <td>0.963684</td>\n",
       "      <td>0.970969</td>\n",
       "      <td>0.968152</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_12</th>\n",
       "      <td>0.952962</td>\n",
       "      <td>0.941758</td>\n",
       "      <td>0.947688</td>\n",
       "      <td>0.961192</td>\n",
       "      <td>0.957282</td>\n",
       "      <td>0.954774</td>\n",
       "      <td>0.963397</td>\n",
       "      <td>0.958116</td>\n",
       "      <td>0.971154</td>\n",
       "      <td>0.978412</td>\n",
       "      <td>0.973727</td>\n",
       "      <td>0.975214</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.981216</td>\n",
       "      <td>0.968082</td>\n",
       "      <td>0.966762</td>\n",
       "      <td>0.963199</td>\n",
       "      <td>0.987711</td>\n",
       "      <td>0.970934</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_13</th>\n",
       "      <td>0.952829</td>\n",
       "      <td>0.944460</td>\n",
       "      <td>0.950114</td>\n",
       "      <td>0.963206</td>\n",
       "      <td>0.957418</td>\n",
       "      <td>0.956343</td>\n",
       "      <td>0.965154</td>\n",
       "      <td>0.962381</td>\n",
       "      <td>0.973719</td>\n",
       "      <td>0.978475</td>\n",
       "      <td>0.978318</td>\n",
       "      <td>0.977735</td>\n",
       "      <td>0.981216</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.969075</td>\n",
       "      <td>0.972275</td>\n",
       "      <td>0.966843</td>\n",
       "      <td>0.977263</td>\n",
       "      <td>0.980874</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_14</th>\n",
       "      <td>0.948303</td>\n",
       "      <td>0.953687</td>\n",
       "      <td>0.955320</td>\n",
       "      <td>0.959059</td>\n",
       "      <td>0.956558</td>\n",
       "      <td>0.949685</td>\n",
       "      <td>0.951198</td>\n",
       "      <td>0.960166</td>\n",
       "      <td>0.972987</td>\n",
       "      <td>0.987397</td>\n",
       "      <td>0.963091</td>\n",
       "      <td>0.968885</td>\n",
       "      <td>0.968082</td>\n",
       "      <td>0.969075</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.972104</td>\n",
       "      <td>0.977651</td>\n",
       "      <td>0.980593</td>\n",
       "      <td>0.978070</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_15</th>\n",
       "      <td>0.949407</td>\n",
       "      <td>0.960452</td>\n",
       "      <td>0.955833</td>\n",
       "      <td>0.969456</td>\n",
       "      <td>0.959968</td>\n",
       "      <td>0.951494</td>\n",
       "      <td>0.961519</td>\n",
       "      <td>0.969288</td>\n",
       "      <td>0.978265</td>\n",
       "      <td>0.966143</td>\n",
       "      <td>0.981756</td>\n",
       "      <td>0.965114</td>\n",
       "      <td>0.966762</td>\n",
       "      <td>0.972275</td>\n",
       "      <td>0.972104</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.982537</td>\n",
       "      <td>0.975701</td>\n",
       "      <td>0.980916</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_16</th>\n",
       "      <td>0.948455</td>\n",
       "      <td>0.958069</td>\n",
       "      <td>0.955834</td>\n",
       "      <td>0.963931</td>\n",
       "      <td>0.958002</td>\n",
       "      <td>0.949397</td>\n",
       "      <td>0.955037</td>\n",
       "      <td>0.969771</td>\n",
       "      <td>0.990127</td>\n",
       "      <td>0.969127</td>\n",
       "      <td>0.971032</td>\n",
       "      <td>0.963684</td>\n",
       "      <td>0.963199</td>\n",
       "      <td>0.966843</td>\n",
       "      <td>0.977651</td>\n",
       "      <td>0.982537</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.973746</td>\n",
       "      <td>0.975964</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_17</th>\n",
       "      <td>0.949762</td>\n",
       "      <td>0.955050</td>\n",
       "      <td>0.956771</td>\n",
       "      <td>0.967005</td>\n",
       "      <td>0.959996</td>\n",
       "      <td>0.953315</td>\n",
       "      <td>0.960886</td>\n",
       "      <td>0.959363</td>\n",
       "      <td>0.972071</td>\n",
       "      <td>0.976464</td>\n",
       "      <td>0.969401</td>\n",
       "      <td>0.970969</td>\n",
       "      <td>0.987711</td>\n",
       "      <td>0.977263</td>\n",
       "      <td>0.980593</td>\n",
       "      <td>0.975701</td>\n",
       "      <td>0.973746</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.981802</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_18</th>\n",
       "      <td>0.947914</td>\n",
       "      <td>0.956554</td>\n",
       "      <td>0.954623</td>\n",
       "      <td>0.968558</td>\n",
       "      <td>0.956790</td>\n",
       "      <td>0.948816</td>\n",
       "      <td>0.960835</td>\n",
       "      <td>0.964948</td>\n",
       "      <td>0.971182</td>\n",
       "      <td>0.970646</td>\n",
       "      <td>0.970960</td>\n",
       "      <td>0.968152</td>\n",
       "      <td>0.970934</td>\n",
       "      <td>0.980874</td>\n",
       "      <td>0.978070</td>\n",
       "      <td>0.980916</td>\n",
       "      <td>0.975964</td>\n",
       "      <td>0.981802</td>\n",
       "      <td>1.000000</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "                    oofs_disagreeds_0  oofs_disagreeds_1  oofs_disagreeds_2  \\\n",
       "oofs_disagreeds_0            1.000000           0.956195           0.965381   \n",
       "oofs_disagreeds_1            0.956195           1.000000           0.972663   \n",
       "oofs_disagreeds_2            0.965381           0.972663           1.000000   \n",
       "oofs_disagreeds_3            0.967006           0.969680           0.973263   \n",
       "oofs_disagreeds_4            0.970607           0.980875           0.974988   \n",
       "oofs_disagreeds_5            0.973446           0.961191           0.984404   \n",
       "oofs_disagreeds_6            0.969427           0.958854           0.967257   \n",
       "oofs_disagreeds_7            0.947563           0.940807           0.938211   \n",
       "oofs_disagreeds_8            0.953924           0.950593           0.951614   \n",
       "oofs_disagreeds_9            0.953973           0.941773           0.948799   \n",
       "oofs_disagreeds_10           0.953546           0.946255           0.949472   \n",
       "oofs_disagreeds_11           0.955650           0.942006           0.945175   \n",
       "oofs_disagreeds_12           0.952962           0.941758           0.947688   \n",
       "oofs_disagreeds_13           0.952829           0.944460           0.950114   \n",
       "oofs_disagreeds_14           0.948303           0.953687           0.955320   \n",
       "oofs_disagreeds_15           0.949407           0.960452           0.955833   \n",
       "oofs_disagreeds_16           0.948455           0.958069           0.955834   \n",
       "oofs_disagreeds_17           0.949762           0.955050           0.956771   \n",
       "oofs_disagreeds_18           0.947914           0.956554           0.954623   \n",
       "\n",
       "                    oofs_disagreeds_3  oofs_disagreeds_4  oofs_disagreeds_5  \\\n",
       "oofs_disagreeds_0            0.967006           0.970607           0.973446   \n",
       "oofs_disagreeds_1            0.969680           0.980875           0.961191   \n",
       "oofs_disagreeds_2            0.973263           0.974988           0.984404   \n",
       "oofs_disagreeds_3            1.000000           0.973872           0.970303   \n",
       "oofs_disagreeds_4            0.973872           1.000000           0.976251   \n",
       "oofs_disagreeds_5            0.970303           0.976251           1.000000   \n",
       "oofs_disagreeds_6            0.986330           0.972630           0.971418   \n",
       "oofs_disagreeds_7            0.955033           0.948467           0.938822   \n",
       "oofs_disagreeds_8            0.962220           0.960164           0.953536   \n",
       "oofs_disagreeds_9            0.955903           0.957989           0.955377   \n",
       "oofs_disagreeds_10           0.963340           0.960040           0.955229   \n",
       "oofs_disagreeds_11           0.957298           0.955228           0.950740   \n",
       "oofs_disagreeds_12           0.961192           0.957282           0.954774   \n",
       "oofs_disagreeds_13           0.963206           0.957418           0.956343   \n",
       "oofs_disagreeds_14           0.959059           0.956558           0.949685   \n",
       "oofs_disagreeds_15           0.969456           0.959968           0.951494   \n",
       "oofs_disagreeds_16           0.963931           0.958002           0.949397   \n",
       "oofs_disagreeds_17           0.967005           0.959996           0.953315   \n",
       "oofs_disagreeds_18           0.968558           0.956790           0.948816   \n",
       "\n",
       "                    oofs_disagreeds_6  oofs_disagreeds_7  oofs_disagreeds_8  \\\n",
       "oofs_disagreeds_0            0.969427           0.947563           0.953924   \n",
       "oofs_disagreeds_1            0.958854           0.940807           0.950593   \n",
       "oofs_disagreeds_2            0.967257           0.938211           0.951614   \n",
       "oofs_disagreeds_3            0.986330           0.955033           0.962220   \n",
       "oofs_disagreeds_4            0.972630           0.948467           0.960164   \n",
       "oofs_disagreeds_5            0.971418           0.938822           0.953536   \n",
       "oofs_disagreeds_6            1.000000           0.954179           0.960763   \n",
       "oofs_disagreeds_7            0.954179           1.000000           0.975049   \n",
       "oofs_disagreeds_8            0.960763           0.975049           1.000000   \n",
       "oofs_disagreeds_9            0.957241           0.961024           0.976337   \n",
       "oofs_disagreeds_10           0.964684           0.967412           0.977972   \n",
       "oofs_disagreeds_11           0.959270           0.970211           0.971753   \n",
       "oofs_disagreeds_12           0.963397           0.958116           0.971154   \n",
       "oofs_disagreeds_13           0.965154           0.962381           0.973719   \n",
       "oofs_disagreeds_14           0.951198           0.960166           0.972987   \n",
       "oofs_disagreeds_15           0.961519           0.969288           0.978265   \n",
       "oofs_disagreeds_16           0.955037           0.969771           0.990127   \n",
       "oofs_disagreeds_17           0.960886           0.959363           0.972071   \n",
       "oofs_disagreeds_18           0.960835           0.964948           0.971182   \n",
       "\n",
       "                    oofs_disagreeds_9  oofs_disagreeds_10  oofs_disagreeds_11  \\\n",
       "oofs_disagreeds_0            0.953973            0.953546            0.955650   \n",
       "oofs_disagreeds_1            0.941773            0.946255            0.942006   \n",
       "oofs_disagreeds_2            0.948799            0.949472            0.945175   \n",
       "oofs_disagreeds_3            0.955903            0.963340            0.957298   \n",
       "oofs_disagreeds_4            0.957989            0.960040            0.955228   \n",
       "oofs_disagreeds_5            0.955377            0.955229            0.950740   \n",
       "oofs_disagreeds_6            0.957241            0.964684            0.959270   \n",
       "oofs_disagreeds_7            0.961024            0.967412            0.970211   \n",
       "oofs_disagreeds_8            0.976337            0.977972            0.971753   \n",
       "oofs_disagreeds_9            1.000000            0.972073            0.978640   \n",
       "oofs_disagreeds_10           0.972073            1.000000            0.970571   \n",
       "oofs_disagreeds_11           0.978640            0.970571            1.000000   \n",
       "oofs_disagreeds_12           0.978412            0.973727            0.975214   \n",
       "oofs_disagreeds_13           0.978475            0.978318            0.977735   \n",
       "oofs_disagreeds_14           0.987397            0.963091            0.968885   \n",
       "oofs_disagreeds_15           0.966143            0.981756            0.965114   \n",
       "oofs_disagreeds_16           0.969127            0.971032            0.963684   \n",
       "oofs_disagreeds_17           0.976464            0.969401            0.970969   \n",
       "oofs_disagreeds_18           0.970646            0.970960            0.968152   \n",
       "\n",
       "                    oofs_disagreeds_12  oofs_disagreeds_13  \\\n",
       "oofs_disagreeds_0             0.952962            0.952829   \n",
       "oofs_disagreeds_1             0.941758            0.944460   \n",
       "oofs_disagreeds_2             0.947688            0.950114   \n",
       "oofs_disagreeds_3             0.961192            0.963206   \n",
       "oofs_disagreeds_4             0.957282            0.957418   \n",
       "oofs_disagreeds_5             0.954774            0.956343   \n",
       "oofs_disagreeds_6             0.963397            0.965154   \n",
       "oofs_disagreeds_7             0.958116            0.962381   \n",
       "oofs_disagreeds_8             0.971154            0.973719   \n",
       "oofs_disagreeds_9             0.978412            0.978475   \n",
       "oofs_disagreeds_10            0.973727            0.978318   \n",
       "oofs_disagreeds_11            0.975214            0.977735   \n",
       "oofs_disagreeds_12            1.000000            0.981216   \n",
       "oofs_disagreeds_13            0.981216            1.000000   \n",
       "oofs_disagreeds_14            0.968082            0.969075   \n",
       "oofs_disagreeds_15            0.966762            0.972275   \n",
       "oofs_disagreeds_16            0.963199            0.966843   \n",
       "oofs_disagreeds_17            0.987711            0.977263   \n",
       "oofs_disagreeds_18            0.970934            0.980874   \n",
       "\n",
       "                    oofs_disagreeds_14  oofs_disagreeds_15  \\\n",
       "oofs_disagreeds_0             0.948303            0.949407   \n",
       "oofs_disagreeds_1             0.953687            0.960452   \n",
       "oofs_disagreeds_2             0.955320            0.955833   \n",
       "oofs_disagreeds_3             0.959059            0.969456   \n",
       "oofs_disagreeds_4             0.956558            0.959968   \n",
       "oofs_disagreeds_5             0.949685            0.951494   \n",
       "oofs_disagreeds_6             0.951198            0.961519   \n",
       "oofs_disagreeds_7             0.960166            0.969288   \n",
       "oofs_disagreeds_8             0.972987            0.978265   \n",
       "oofs_disagreeds_9             0.987397            0.966143   \n",
       "oofs_disagreeds_10            0.963091            0.981756   \n",
       "oofs_disagreeds_11            0.968885            0.965114   \n",
       "oofs_disagreeds_12            0.968082            0.966762   \n",
       "oofs_disagreeds_13            0.969075            0.972275   \n",
       "oofs_disagreeds_14            1.000000            0.972104   \n",
       "oofs_disagreeds_15            0.972104            1.000000   \n",
       "oofs_disagreeds_16            0.977651            0.982537   \n",
       "oofs_disagreeds_17            0.980593            0.975701   \n",
       "oofs_disagreeds_18            0.978070            0.980916   \n",
       "\n",
       "                    oofs_disagreeds_16  oofs_disagreeds_17  oofs_disagreeds_18  \n",
       "oofs_disagreeds_0             0.948455            0.949762            0.947914  \n",
       "oofs_disagreeds_1             0.958069            0.955050            0.956554  \n",
       "oofs_disagreeds_2             0.955834            0.956771            0.954623  \n",
       "oofs_disagreeds_3             0.963931            0.967005            0.968558  \n",
       "oofs_disagreeds_4             0.958002            0.959996            0.956790  \n",
       "oofs_disagreeds_5             0.949397            0.953315            0.948816  \n",
       "oofs_disagreeds_6             0.955037            0.960886            0.960835  \n",
       "oofs_disagreeds_7             0.969771            0.959363            0.964948  \n",
       "oofs_disagreeds_8             0.990127            0.972071            0.971182  \n",
       "oofs_disagreeds_9             0.969127            0.976464            0.970646  \n",
       "oofs_disagreeds_10            0.971032            0.969401            0.970960  \n",
       "oofs_disagreeds_11            0.963684            0.970969            0.968152  \n",
       "oofs_disagreeds_12            0.963199            0.987711            0.970934  \n",
       "oofs_disagreeds_13            0.966843            0.977263            0.980874  \n",
       "oofs_disagreeds_14            0.977651            0.980593            0.978070  \n",
       "oofs_disagreeds_15            0.982537            0.975701            0.980916  \n",
       "oofs_disagreeds_16            1.000000            0.973746            0.975964  \n",
       "oofs_disagreeds_17            0.973746            1.000000            0.981802  \n",
       "oofs_disagreeds_18            0.975964            0.981802            1.000000  "
      ]
     },
     "execution_count": 44,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "disagreeds.corr()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 45,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>oofs_unrelated_0</th>\n",
       "      <th>oofs_unrelated_1</th>\n",
       "      <th>oofs_unrelated_2</th>\n",
       "      <th>oofs_unrelated_3</th>\n",
       "      <th>oofs_unrelated_4</th>\n",
       "      <th>oofs_unrelated_5</th>\n",
       "      <th>oofs_unrelated_6</th>\n",
       "      <th>oofs_unrelated_7</th>\n",
       "      <th>oofs_unrelated_8</th>\n",
       "      <th>oofs_unrelated_9</th>\n",
       "      <th>oofs_unrelated_10</th>\n",
       "      <th>oofs_unrelated_11</th>\n",
       "      <th>oofs_unrelated_12</th>\n",
       "      <th>oofs_unrelated_13</th>\n",
       "      <th>oofs_unrelated_14</th>\n",
       "      <th>oofs_unrelated_15</th>\n",
       "      <th>oofs_unrelated_16</th>\n",
       "      <th>oofs_unrelated_17</th>\n",
       "      <th>oofs_unrelated_18</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_0</th>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.979483</td>\n",
       "      <td>0.982918</td>\n",
       "      <td>0.982116</td>\n",
       "      <td>0.980407</td>\n",
       "      <td>0.983841</td>\n",
       "      <td>0.982145</td>\n",
       "      <td>0.967810</td>\n",
       "      <td>0.969136</td>\n",
       "      <td>0.971615</td>\n",
       "      <td>0.969398</td>\n",
       "      <td>0.971206</td>\n",
       "      <td>0.969555</td>\n",
       "      <td>0.970029</td>\n",
       "      <td>0.970348</td>\n",
       "      <td>0.969500</td>\n",
       "      <td>0.968974</td>\n",
       "      <td>0.969428</td>\n",
       "      <td>0.970400</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_1</th>\n",
       "      <td>0.979483</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.985805</td>\n",
       "      <td>0.983235</td>\n",
       "      <td>0.993880</td>\n",
       "      <td>0.983194</td>\n",
       "      <td>0.981397</td>\n",
       "      <td>0.970652</td>\n",
       "      <td>0.972543</td>\n",
       "      <td>0.969819</td>\n",
       "      <td>0.972554</td>\n",
       "      <td>0.966131</td>\n",
       "      <td>0.969558</td>\n",
       "      <td>0.970829</td>\n",
       "      <td>0.973004</td>\n",
       "      <td>0.975851</td>\n",
       "      <td>0.975135</td>\n",
       "      <td>0.973627</td>\n",
       "      <td>0.975486</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_2</th>\n",
       "      <td>0.982918</td>\n",
       "      <td>0.985805</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.984631</td>\n",
       "      <td>0.985170</td>\n",
       "      <td>0.992641</td>\n",
       "      <td>0.983242</td>\n",
       "      <td>0.964436</td>\n",
       "      <td>0.969656</td>\n",
       "      <td>0.968727</td>\n",
       "      <td>0.970621</td>\n",
       "      <td>0.965539</td>\n",
       "      <td>0.969039</td>\n",
       "      <td>0.969442</td>\n",
       "      <td>0.971613</td>\n",
       "      <td>0.973276</td>\n",
       "      <td>0.971808</td>\n",
       "      <td>0.972618</td>\n",
       "      <td>0.973165</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_3</th>\n",
       "      <td>0.982116</td>\n",
       "      <td>0.983235</td>\n",
       "      <td>0.984631</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.981340</td>\n",
       "      <td>0.981809</td>\n",
       "      <td>0.994782</td>\n",
       "      <td>0.971740</td>\n",
       "      <td>0.974504</td>\n",
       "      <td>0.972338</td>\n",
       "      <td>0.978712</td>\n",
       "      <td>0.973816</td>\n",
       "      <td>0.977123</td>\n",
       "      <td>0.979705</td>\n",
       "      <td>0.973593</td>\n",
       "      <td>0.981349</td>\n",
       "      <td>0.975971</td>\n",
       "      <td>0.979870</td>\n",
       "      <td>0.982335</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_4</th>\n",
       "      <td>0.980407</td>\n",
       "      <td>0.993880</td>\n",
       "      <td>0.985170</td>\n",
       "      <td>0.981340</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.985285</td>\n",
       "      <td>0.980849</td>\n",
       "      <td>0.969350</td>\n",
       "      <td>0.972134</td>\n",
       "      <td>0.970553</td>\n",
       "      <td>0.971981</td>\n",
       "      <td>0.965403</td>\n",
       "      <td>0.970200</td>\n",
       "      <td>0.970659</td>\n",
       "      <td>0.971251</td>\n",
       "      <td>0.973084</td>\n",
       "      <td>0.973036</td>\n",
       "      <td>0.972064</td>\n",
       "      <td>0.972748</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_5</th>\n",
       "      <td>0.983841</td>\n",
       "      <td>0.983194</td>\n",
       "      <td>0.992641</td>\n",
       "      <td>0.981809</td>\n",
       "      <td>0.985285</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.982544</td>\n",
       "      <td>0.964530</td>\n",
       "      <td>0.968744</td>\n",
       "      <td>0.969367</td>\n",
       "      <td>0.970257</td>\n",
       "      <td>0.964712</td>\n",
       "      <td>0.970017</td>\n",
       "      <td>0.969717</td>\n",
       "      <td>0.968046</td>\n",
       "      <td>0.970220</td>\n",
       "      <td>0.969092</td>\n",
       "      <td>0.969949</td>\n",
       "      <td>0.970316</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_6</th>\n",
       "      <td>0.982145</td>\n",
       "      <td>0.981397</td>\n",
       "      <td>0.983242</td>\n",
       "      <td>0.994782</td>\n",
       "      <td>0.980849</td>\n",
       "      <td>0.982544</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.970761</td>\n",
       "      <td>0.973068</td>\n",
       "      <td>0.971318</td>\n",
       "      <td>0.977646</td>\n",
       "      <td>0.972614</td>\n",
       "      <td>0.976537</td>\n",
       "      <td>0.979104</td>\n",
       "      <td>0.970859</td>\n",
       "      <td>0.978754</td>\n",
       "      <td>0.973566</td>\n",
       "      <td>0.978102</td>\n",
       "      <td>0.979990</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_7</th>\n",
       "      <td>0.967810</td>\n",
       "      <td>0.970652</td>\n",
       "      <td>0.964436</td>\n",
       "      <td>0.971740</td>\n",
       "      <td>0.969350</td>\n",
       "      <td>0.964530</td>\n",
       "      <td>0.970761</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.986337</td>\n",
       "      <td>0.978792</td>\n",
       "      <td>0.983965</td>\n",
       "      <td>0.980390</td>\n",
       "      <td>0.974268</td>\n",
       "      <td>0.978164</td>\n",
       "      <td>0.978848</td>\n",
       "      <td>0.983940</td>\n",
       "      <td>0.986260</td>\n",
       "      <td>0.975830</td>\n",
       "      <td>0.979610</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_8</th>\n",
       "      <td>0.969136</td>\n",
       "      <td>0.972543</td>\n",
       "      <td>0.969656</td>\n",
       "      <td>0.974504</td>\n",
       "      <td>0.972134</td>\n",
       "      <td>0.968744</td>\n",
       "      <td>0.973068</td>\n",
       "      <td>0.986337</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.984056</td>\n",
       "      <td>0.987820</td>\n",
       "      <td>0.979701</td>\n",
       "      <td>0.980506</td>\n",
       "      <td>0.981747</td>\n",
       "      <td>0.983653</td>\n",
       "      <td>0.987454</td>\n",
       "      <td>0.996736</td>\n",
       "      <td>0.981314</td>\n",
       "      <td>0.982084</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_9</th>\n",
       "      <td>0.971615</td>\n",
       "      <td>0.969819</td>\n",
       "      <td>0.968727</td>\n",
       "      <td>0.972338</td>\n",
       "      <td>0.970553</td>\n",
       "      <td>0.969367</td>\n",
       "      <td>0.971318</td>\n",
       "      <td>0.978792</td>\n",
       "      <td>0.984056</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.981037</td>\n",
       "      <td>0.987146</td>\n",
       "      <td>0.986571</td>\n",
       "      <td>0.984970</td>\n",
       "      <td>0.996133</td>\n",
       "      <td>0.979866</td>\n",
       "      <td>0.982942</td>\n",
       "      <td>0.985613</td>\n",
       "      <td>0.983777</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_10</th>\n",
       "      <td>0.969398</td>\n",
       "      <td>0.972554</td>\n",
       "      <td>0.970621</td>\n",
       "      <td>0.978712</td>\n",
       "      <td>0.971981</td>\n",
       "      <td>0.970257</td>\n",
       "      <td>0.977646</td>\n",
       "      <td>0.983965</td>\n",
       "      <td>0.987820</td>\n",
       "      <td>0.981037</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.979647</td>\n",
       "      <td>0.983315</td>\n",
       "      <td>0.986668</td>\n",
       "      <td>0.980061</td>\n",
       "      <td>0.993637</td>\n",
       "      <td>0.987707</td>\n",
       "      <td>0.983753</td>\n",
       "      <td>0.986783</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_11</th>\n",
       "      <td>0.971206</td>\n",
       "      <td>0.966131</td>\n",
       "      <td>0.965539</td>\n",
       "      <td>0.973816</td>\n",
       "      <td>0.965403</td>\n",
       "      <td>0.964712</td>\n",
       "      <td>0.972614</td>\n",
       "      <td>0.980390</td>\n",
       "      <td>0.979701</td>\n",
       "      <td>0.987146</td>\n",
       "      <td>0.979647</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.982623</td>\n",
       "      <td>0.984743</td>\n",
       "      <td>0.985059</td>\n",
       "      <td>0.979124</td>\n",
       "      <td>0.978441</td>\n",
       "      <td>0.982137</td>\n",
       "      <td>0.982855</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_12</th>\n",
       "      <td>0.969555</td>\n",
       "      <td>0.969558</td>\n",
       "      <td>0.969039</td>\n",
       "      <td>0.977123</td>\n",
       "      <td>0.970200</td>\n",
       "      <td>0.970017</td>\n",
       "      <td>0.976537</td>\n",
       "      <td>0.974268</td>\n",
       "      <td>0.980506</td>\n",
       "      <td>0.986571</td>\n",
       "      <td>0.983315</td>\n",
       "      <td>0.982623</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.988826</td>\n",
       "      <td>0.983774</td>\n",
       "      <td>0.981790</td>\n",
       "      <td>0.979319</td>\n",
       "      <td>0.995708</td>\n",
       "      <td>0.986629</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_13</th>\n",
       "      <td>0.970029</td>\n",
       "      <td>0.970829</td>\n",
       "      <td>0.969442</td>\n",
       "      <td>0.979705</td>\n",
       "      <td>0.970659</td>\n",
       "      <td>0.969717</td>\n",
       "      <td>0.979104</td>\n",
       "      <td>0.978164</td>\n",
       "      <td>0.981747</td>\n",
       "      <td>0.984970</td>\n",
       "      <td>0.986668</td>\n",
       "      <td>0.984743</td>\n",
       "      <td>0.988826</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.983028</td>\n",
       "      <td>0.986013</td>\n",
       "      <td>0.981009</td>\n",
       "      <td>0.988582</td>\n",
       "      <td>0.993820</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_14</th>\n",
       "      <td>0.970348</td>\n",
       "      <td>0.973004</td>\n",
       "      <td>0.971613</td>\n",
       "      <td>0.973593</td>\n",
       "      <td>0.971251</td>\n",
       "      <td>0.968046</td>\n",
       "      <td>0.970859</td>\n",
       "      <td>0.978848</td>\n",
       "      <td>0.983653</td>\n",
       "      <td>0.996133</td>\n",
       "      <td>0.980061</td>\n",
       "      <td>0.985059</td>\n",
       "      <td>0.983774</td>\n",
       "      <td>0.983028</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.981758</td>\n",
       "      <td>0.984669</td>\n",
       "      <td>0.986677</td>\n",
       "      <td>0.985435</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_15</th>\n",
       "      <td>0.969500</td>\n",
       "      <td>0.975851</td>\n",
       "      <td>0.973276</td>\n",
       "      <td>0.981349</td>\n",
       "      <td>0.973084</td>\n",
       "      <td>0.970220</td>\n",
       "      <td>0.978754</td>\n",
       "      <td>0.983940</td>\n",
       "      <td>0.987454</td>\n",
       "      <td>0.979866</td>\n",
       "      <td>0.993637</td>\n",
       "      <td>0.979124</td>\n",
       "      <td>0.981790</td>\n",
       "      <td>0.986013</td>\n",
       "      <td>0.981758</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.989113</td>\n",
       "      <td>0.984935</td>\n",
       "      <td>0.988874</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_16</th>\n",
       "      <td>0.968974</td>\n",
       "      <td>0.975135</td>\n",
       "      <td>0.971808</td>\n",
       "      <td>0.975971</td>\n",
       "      <td>0.973036</td>\n",
       "      <td>0.969092</td>\n",
       "      <td>0.973566</td>\n",
       "      <td>0.986260</td>\n",
       "      <td>0.996736</td>\n",
       "      <td>0.982942</td>\n",
       "      <td>0.987707</td>\n",
       "      <td>0.978441</td>\n",
       "      <td>0.979319</td>\n",
       "      <td>0.981009</td>\n",
       "      <td>0.984669</td>\n",
       "      <td>0.989113</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.982159</td>\n",
       "      <td>0.983709</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_17</th>\n",
       "      <td>0.969428</td>\n",
       "      <td>0.973627</td>\n",
       "      <td>0.972618</td>\n",
       "      <td>0.979870</td>\n",
       "      <td>0.972064</td>\n",
       "      <td>0.969949</td>\n",
       "      <td>0.978102</td>\n",
       "      <td>0.975830</td>\n",
       "      <td>0.981314</td>\n",
       "      <td>0.985613</td>\n",
       "      <td>0.983753</td>\n",
       "      <td>0.982137</td>\n",
       "      <td>0.995708</td>\n",
       "      <td>0.988582</td>\n",
       "      <td>0.986677</td>\n",
       "      <td>0.984935</td>\n",
       "      <td>0.982159</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.989607</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_18</th>\n",
       "      <td>0.970400</td>\n",
       "      <td>0.975486</td>\n",
       "      <td>0.973165</td>\n",
       "      <td>0.982335</td>\n",
       "      <td>0.972748</td>\n",
       "      <td>0.970316</td>\n",
       "      <td>0.979990</td>\n",
       "      <td>0.979610</td>\n",
       "      <td>0.982084</td>\n",
       "      <td>0.983777</td>\n",
       "      <td>0.986783</td>\n",
       "      <td>0.982855</td>\n",
       "      <td>0.986629</td>\n",
       "      <td>0.993820</td>\n",
       "      <td>0.985435</td>\n",
       "      <td>0.988874</td>\n",
       "      <td>0.983709</td>\n",
       "      <td>0.989607</td>\n",
       "      <td>1.000000</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "                   oofs_unrelated_0  oofs_unrelated_1  oofs_unrelated_2  \\\n",
       "oofs_unrelated_0           1.000000          0.979483          0.982918   \n",
       "oofs_unrelated_1           0.979483          1.000000          0.985805   \n",
       "oofs_unrelated_2           0.982918          0.985805          1.000000   \n",
       "oofs_unrelated_3           0.982116          0.983235          0.984631   \n",
       "oofs_unrelated_4           0.980407          0.993880          0.985170   \n",
       "oofs_unrelated_5           0.983841          0.983194          0.992641   \n",
       "oofs_unrelated_6           0.982145          0.981397          0.983242   \n",
       "oofs_unrelated_7           0.967810          0.970652          0.964436   \n",
       "oofs_unrelated_8           0.969136          0.972543          0.969656   \n",
       "oofs_unrelated_9           0.971615          0.969819          0.968727   \n",
       "oofs_unrelated_10          0.969398          0.972554          0.970621   \n",
       "oofs_unrelated_11          0.971206          0.966131          0.965539   \n",
       "oofs_unrelated_12          0.969555          0.969558          0.969039   \n",
       "oofs_unrelated_13          0.970029          0.970829          0.969442   \n",
       "oofs_unrelated_14          0.970348          0.973004          0.971613   \n",
       "oofs_unrelated_15          0.969500          0.975851          0.973276   \n",
       "oofs_unrelated_16          0.968974          0.975135          0.971808   \n",
       "oofs_unrelated_17          0.969428          0.973627          0.972618   \n",
       "oofs_unrelated_18          0.970400          0.975486          0.973165   \n",
       "\n",
       "                   oofs_unrelated_3  oofs_unrelated_4  oofs_unrelated_5  \\\n",
       "oofs_unrelated_0           0.982116          0.980407          0.983841   \n",
       "oofs_unrelated_1           0.983235          0.993880          0.983194   \n",
       "oofs_unrelated_2           0.984631          0.985170          0.992641   \n",
       "oofs_unrelated_3           1.000000          0.981340          0.981809   \n",
       "oofs_unrelated_4           0.981340          1.000000          0.985285   \n",
       "oofs_unrelated_5           0.981809          0.985285          1.000000   \n",
       "oofs_unrelated_6           0.994782          0.980849          0.982544   \n",
       "oofs_unrelated_7           0.971740          0.969350          0.964530   \n",
       "oofs_unrelated_8           0.974504          0.972134          0.968744   \n",
       "oofs_unrelated_9           0.972338          0.970553          0.969367   \n",
       "oofs_unrelated_10          0.978712          0.971981          0.970257   \n",
       "oofs_unrelated_11          0.973816          0.965403          0.964712   \n",
       "oofs_unrelated_12          0.977123          0.970200          0.970017   \n",
       "oofs_unrelated_13          0.979705          0.970659          0.969717   \n",
       "oofs_unrelated_14          0.973593          0.971251          0.968046   \n",
       "oofs_unrelated_15          0.981349          0.973084          0.970220   \n",
       "oofs_unrelated_16          0.975971          0.973036          0.969092   \n",
       "oofs_unrelated_17          0.979870          0.972064          0.969949   \n",
       "oofs_unrelated_18          0.982335          0.972748          0.970316   \n",
       "\n",
       "                   oofs_unrelated_6  oofs_unrelated_7  oofs_unrelated_8  \\\n",
       "oofs_unrelated_0           0.982145          0.967810          0.969136   \n",
       "oofs_unrelated_1           0.981397          0.970652          0.972543   \n",
       "oofs_unrelated_2           0.983242          0.964436          0.969656   \n",
       "oofs_unrelated_3           0.994782          0.971740          0.974504   \n",
       "oofs_unrelated_4           0.980849          0.969350          0.972134   \n",
       "oofs_unrelated_5           0.982544          0.964530          0.968744   \n",
       "oofs_unrelated_6           1.000000          0.970761          0.973068   \n",
       "oofs_unrelated_7           0.970761          1.000000          0.986337   \n",
       "oofs_unrelated_8           0.973068          0.986337          1.000000   \n",
       "oofs_unrelated_9           0.971318          0.978792          0.984056   \n",
       "oofs_unrelated_10          0.977646          0.983965          0.987820   \n",
       "oofs_unrelated_11          0.972614          0.980390          0.979701   \n",
       "oofs_unrelated_12          0.976537          0.974268          0.980506   \n",
       "oofs_unrelated_13          0.979104          0.978164          0.981747   \n",
       "oofs_unrelated_14          0.970859          0.978848          0.983653   \n",
       "oofs_unrelated_15          0.978754          0.983940          0.987454   \n",
       "oofs_unrelated_16          0.973566          0.986260          0.996736   \n",
       "oofs_unrelated_17          0.978102          0.975830          0.981314   \n",
       "oofs_unrelated_18          0.979990          0.979610          0.982084   \n",
       "\n",
       "                   oofs_unrelated_9  oofs_unrelated_10  oofs_unrelated_11  \\\n",
       "oofs_unrelated_0           0.971615           0.969398           0.971206   \n",
       "oofs_unrelated_1           0.969819           0.972554           0.966131   \n",
       "oofs_unrelated_2           0.968727           0.970621           0.965539   \n",
       "oofs_unrelated_3           0.972338           0.978712           0.973816   \n",
       "oofs_unrelated_4           0.970553           0.971981           0.965403   \n",
       "oofs_unrelated_5           0.969367           0.970257           0.964712   \n",
       "oofs_unrelated_6           0.971318           0.977646           0.972614   \n",
       "oofs_unrelated_7           0.978792           0.983965           0.980390   \n",
       "oofs_unrelated_8           0.984056           0.987820           0.979701   \n",
       "oofs_unrelated_9           1.000000           0.981037           0.987146   \n",
       "oofs_unrelated_10          0.981037           1.000000           0.979647   \n",
       "oofs_unrelated_11          0.987146           0.979647           1.000000   \n",
       "oofs_unrelated_12          0.986571           0.983315           0.982623   \n",
       "oofs_unrelated_13          0.984970           0.986668           0.984743   \n",
       "oofs_unrelated_14          0.996133           0.980061           0.985059   \n",
       "oofs_unrelated_15          0.979866           0.993637           0.979124   \n",
       "oofs_unrelated_16          0.982942           0.987707           0.978441   \n",
       "oofs_unrelated_17          0.985613           0.983753           0.982137   \n",
       "oofs_unrelated_18          0.983777           0.986783           0.982855   \n",
       "\n",
       "                   oofs_unrelated_12  oofs_unrelated_13  oofs_unrelated_14  \\\n",
       "oofs_unrelated_0            0.969555           0.970029           0.970348   \n",
       "oofs_unrelated_1            0.969558           0.970829           0.973004   \n",
       "oofs_unrelated_2            0.969039           0.969442           0.971613   \n",
       "oofs_unrelated_3            0.977123           0.979705           0.973593   \n",
       "oofs_unrelated_4            0.970200           0.970659           0.971251   \n",
       "oofs_unrelated_5            0.970017           0.969717           0.968046   \n",
       "oofs_unrelated_6            0.976537           0.979104           0.970859   \n",
       "oofs_unrelated_7            0.974268           0.978164           0.978848   \n",
       "oofs_unrelated_8            0.980506           0.981747           0.983653   \n",
       "oofs_unrelated_9            0.986571           0.984970           0.996133   \n",
       "oofs_unrelated_10           0.983315           0.986668           0.980061   \n",
       "oofs_unrelated_11           0.982623           0.984743           0.985059   \n",
       "oofs_unrelated_12           1.000000           0.988826           0.983774   \n",
       "oofs_unrelated_13           0.988826           1.000000           0.983028   \n",
       "oofs_unrelated_14           0.983774           0.983028           1.000000   \n",
       "oofs_unrelated_15           0.981790           0.986013           0.981758   \n",
       "oofs_unrelated_16           0.979319           0.981009           0.984669   \n",
       "oofs_unrelated_17           0.995708           0.988582           0.986677   \n",
       "oofs_unrelated_18           0.986629           0.993820           0.985435   \n",
       "\n",
       "                   oofs_unrelated_15  oofs_unrelated_16  oofs_unrelated_17  \\\n",
       "oofs_unrelated_0            0.969500           0.968974           0.969428   \n",
       "oofs_unrelated_1            0.975851           0.975135           0.973627   \n",
       "oofs_unrelated_2            0.973276           0.971808           0.972618   \n",
       "oofs_unrelated_3            0.981349           0.975971           0.979870   \n",
       "oofs_unrelated_4            0.973084           0.973036           0.972064   \n",
       "oofs_unrelated_5            0.970220           0.969092           0.969949   \n",
       "oofs_unrelated_6            0.978754           0.973566           0.978102   \n",
       "oofs_unrelated_7            0.983940           0.986260           0.975830   \n",
       "oofs_unrelated_8            0.987454           0.996736           0.981314   \n",
       "oofs_unrelated_9            0.979866           0.982942           0.985613   \n",
       "oofs_unrelated_10           0.993637           0.987707           0.983753   \n",
       "oofs_unrelated_11           0.979124           0.978441           0.982137   \n",
       "oofs_unrelated_12           0.981790           0.979319           0.995708   \n",
       "oofs_unrelated_13           0.986013           0.981009           0.988582   \n",
       "oofs_unrelated_14           0.981758           0.984669           0.986677   \n",
       "oofs_unrelated_15           1.000000           0.989113           0.984935   \n",
       "oofs_unrelated_16           0.989113           1.000000           0.982159   \n",
       "oofs_unrelated_17           0.984935           0.982159           1.000000   \n",
       "oofs_unrelated_18           0.988874           0.983709           0.989607   \n",
       "\n",
       "                   oofs_unrelated_18  \n",
       "oofs_unrelated_0            0.970400  \n",
       "oofs_unrelated_1            0.975486  \n",
       "oofs_unrelated_2            0.973165  \n",
       "oofs_unrelated_3            0.982335  \n",
       "oofs_unrelated_4            0.972748  \n",
       "oofs_unrelated_5            0.970316  \n",
       "oofs_unrelated_6            0.979990  \n",
       "oofs_unrelated_7            0.979610  \n",
       "oofs_unrelated_8            0.982084  \n",
       "oofs_unrelated_9            0.983777  \n",
       "oofs_unrelated_10           0.986783  \n",
       "oofs_unrelated_11           0.982855  \n",
       "oofs_unrelated_12           0.986629  \n",
       "oofs_unrelated_13           0.993820  \n",
       "oofs_unrelated_14           0.985435  \n",
       "oofs_unrelated_15           0.988874  \n",
       "oofs_unrelated_16           0.983709  \n",
       "oofs_unrelated_17           0.989607  \n",
       "oofs_unrelated_18           1.000000  "
      ]
     },
     "execution_count": 45,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "unrelated.corr()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Prepare Different Inputs"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 46,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Only use oofs\n",
    "########### FOR RIDGE #############\n",
    "ensemble_trains = trains.values\n",
    "ensemble_tests = tests.values"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
     "# Ridge Regression"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 47,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "from sklearn.linear_model import RidgeClassifier\n",
    "from sklearn import metrics"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 48,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def get_prob(v):\n",
    "    res = []\n",
    "    for d in v:\n",
    "        d = np.exp(d) / np.sum(np.exp(d))\n",
    "        res.append(d)\n",
    "    return np.array(res)\n",
    "\n",
    "def fit_every_feature_model(feature_data, label, feature_test_data, fold_count=3, predict=True):\n",
    "    predictions = np.zeros(shape=[len(feature_test_data), 3])\n",
    "    fold_size = len(feature_data) // fold_count\n",
    "    oofs = []\n",
    "    \n",
    "    log_loss = 0\n",
    "    for fold_id in range(fold_count):\n",
    "        print(\"Fold : \", fold_id)\n",
    "        fold_start = fold_size * fold_id\n",
    "        fold_end = fold_start + fold_size\n",
    "        if fold_id == fold_count - 1:\n",
    "            fold_end = len(feature_data)\n",
    "                \n",
    "        train_x = np.concatenate([feature_data[:fold_start], feature_data[fold_end:]])\n",
    "        train_y = np.concatenate([label[:fold_start], label[fold_end:]])\n",
    "\n",
    "        val_x = feature_data[fold_start:fold_end]\n",
    "        val_y = label[fold_start:fold_end]        \n",
    "        \n",
    "        clf = RidgeClassifier().fit(train_x, train_y)\n",
    "        print(\"Score\", clf.score(val_x, val_y))\n",
    "        \n",
    "        if predict:\n",
    "            prediction = get_prob(clf.decision_function(feature_test_data))\n",
    "            oof_prediction = get_prob(clf.decision_function(val_x))\n",
    "\n",
    "            score = metrics.log_loss(val_y, oof_prediction)\n",
    "            print(\"Fold\", fold_id, \"log loss\", score)\n",
    "            log_loss += score\n",
    "            oofs.append(oof_prediction)\n",
    "            predictions += prediction        \n",
    "        \n",
    "    predictions /= fold_count   \n",
    "    print(\"Training  Finish\")\n",
    "\n",
    "    return predictions, log_loss / fold_count, oofs"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 49,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Fold :  0\n",
      "Score 0.8934643581344563\n",
      "Fold 0 log loss 0.4350941487452475\n",
      "Fold :  1\n",
      "Score 0.8997660271408516\n",
      "Fold 1 log loss 0.4150244164948867\n",
      "Fold :  2\n",
      "Score 0.8911558259241928\n",
      "Fold 2 log loss 0.4347878648108567\n",
      "Fold :  3\n",
      "Score 0.8863203868351271\n",
      "Fold 3 log loss 0.44601551541715645\n",
      "Fold :  4\n",
      "Score 0.900701918577445\n",
      "Fold 4 log loss 0.4255465423154889\n",
      "Fold :  5\n",
      "Score 0.8869755108407424\n",
      "Fold 5 log loss 0.44378103728851853\n",
      "Fold :  6\n",
      "Score 0.8762127593199189\n",
      "Fold 6 log loss 0.4554272209406775\n",
      "Fold :  7\n",
      "Score 0.8882545624707534\n",
      "Fold 7 log loss 0.4404644745130055\n",
      "Fold :  8\n",
      "Score 0.8920605209795663\n",
      "Fold 8 log loss 0.43701455725662064\n",
      "Fold :  9\n",
      "Score 0.8943132545153945\n",
      "Fold 9 log loss 0.43079371968839936\n",
      "Training  Finish\n"
     ]
    }
   ],
   "source": [
    "pred, log_loss, oofs = fit_every_feature_model(ensemble_trains, labels, ensemble_tests, fold_count=10)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 50,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def np_weighted_accuracy(y_true, y_pred):\n",
    "    weight = np.array([[1/16, 1/15, 1/5]])\n",
    "    norm = [(1/16) + (1/15) + (1/5)]\n",
    "    weight_mask = weight * y_true\n",
    "    weight_mask = np.max(weight_mask, axis=-1)\n",
    "    norms = np.sum(weight_mask)\n",
    "    \n",
    "    y_true = np.argmax(y_true, axis=-1)\n",
    "    y_pred = np.argmax(y_pred, axis=-1)\n",
    "    \n",
    "    res = ((y_true == y_pred) * weight_mask).sum() / norms\n",
    "    return res"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 51,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "# Stack the per-fold OOF predictions; folds are contiguous, so rows line up with `labels`.\n",
     "oofs = np.concatenate(oofs)\n",
     "# NOTE(review): to_categorical is presumably keras.utils.to_categorical imported earlier in the notebook -- confirm.\n",
     "score = np_weighted_accuracy(to_categorical(labels), oofs)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 52,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "score 0.8690726824017866\n",
      "Predicting training results...\n",
      "Predicting labeled testing results...\n"
     ]
    }
   ],
   "source": [
     "print(\"score\", score)\n",
     "# Output locations: OOF train predictions, averaged test probabilities, one-hot submission.\n",
     "oofs_dir = \"../data/p_ensemble/oofs/\"\n",
     "output_dir = \"../data/p_ensemble/preds/\"\n",
     "onehot_pred_dir = \"../data/p_ensemble/nn_one_hot/\"\n",
     "\n",
     "model_submit_prefix = \"Ridge-Ensemble\"\n",
     "\n",
     "oofs_path = oofs_dir + model_submit_prefix\n",
     "output_path = output_dir + model_submit_prefix\n",
     "one_hot_pred_path = onehot_pred_dir + \"One-Hot\" + model_submit_prefix\n",
     "\n",
     "print(\"Predicting training results...\")\n",
     "# Rebinds `oofs` from ndarray to DataFrame; later cells must not expect the array form.\n",
     "oofs = pd.DataFrame({\"unrelated\": oofs[:, 0], \"agreed\": oofs[:, 1], \"disagreed\": oofs[:, 2]})\n",
     "# NOTE(review): '{:4f}' is width-4 with default 6 decimals; '{:.4f}' was likely intended.\n",
     "# NB_WORDS is defined earlier in the notebook (not visible in this section).\n",
     "submit_path = oofs_path + \"-Train-L{:4f}-NB{:d}.csv\".format(score, NB_WORDS)\n",
     "oofs.to_csv(submit_path, index=False)\n",
     "\n",
     "test_predicts = pd.DataFrame({\"unrelated\": pred[:, 0], \"agreed\": pred[:, 1], \"disagreed\": pred[:, 2]})\n",
     "submit_path = output_path + \"-L{:4f}-NB{:d}.csv\".format(score, NB_WORDS)\n",
     "test_predicts.to_csv(submit_path, index=False) # 0.3343\n",
     "\n",
     "print(\"Predicting labeled testing results...\")\n",
     "# Hard-max the class probabilities into a label column name for the one-hot submission.\n",
     "ids = pd.read_csv(\"../data/dataset/test.csv\")\n",
     "pred_labels = test_predicts.idxmax(axis=1)\n",
     "sub = pd.DataFrame({\"Id\": ids['id'].values, \"Category\": pred_labels})\n",
     "submit_path = one_hot_pred_path + \"-L{:4f}-NB{:d}.csv\".format(score, NB_WORDS)\n",
     "sub.to_csv(submit_path, index=False)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Scaled"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 53,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def get_prob(v):\n",
    "    res = []\n",
    "    for d in v:\n",
    "        d = np.exp(d) / np.sum(np.exp(d))\n",
    "        res.append(d)\n",
    "    return np.array(res)\n",
    "\n",
    "def fit_every_feature_model(feature_data, label, feature_test_data, fold_count=3, predict=True):\n",
    "    predictions = np.zeros(shape=[len(feature_test_data), 3])\n",
    "    fold_size = len(feature_data) // fold_count\n",
    "    oofs = []\n",
    "    \n",
    "    log_loss = 0\n",
    "    for fold_id in range(fold_count):\n",
    "        print(\"Fold : \", fold_id)\n",
    "        fold_start = fold_size * fold_id\n",
    "        fold_end = fold_start + fold_size\n",
    "        if fold_id == fold_count - 1:\n",
    "            fold_end = len(feature_data)\n",
    "                \n",
    "        train_x = np.concatenate([feature_data[:fold_start], feature_data[fold_end:]])\n",
    "        train_y = np.concatenate([label[:fold_start], label[fold_end:]])\n",
    "\n",
    "        val_x = feature_data[fold_start:fold_end]\n",
    "        val_y = label[fold_start:fold_end]        \n",
    "        \n",
    "        clf = RidgeClassifier(class_weight={0: 1/16, 1:1/15, 2: 1/5}).fit(train_x, train_y)\n",
    "        print(\"Score\", clf.score(val_x, val_y))\n",
    "        \n",
    "        if predict:\n",
    "            prediction = get_prob(clf.decision_function(feature_test_data))\n",
    "            oof_prediction = get_prob(clf.decision_function(val_x))\n",
    "\n",
    "            score = metrics.log_loss(val_y, oof_prediction)\n",
    "            print(\"Fold\", fold_id, \"log loss\", score)\n",
    "            log_loss += score\n",
    "            oofs.append(oof_prediction)\n",
    "            predictions += prediction        \n",
    "        \n",
    "    predictions /= fold_count   \n",
    "    print(\"Training  Finish\")\n",
    "\n",
    "    return predictions, log_loss / fold_count, oofs"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 54,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Fold :  0\n",
      "Score 0.8881297769458742\n",
      "Fold 0 log loss 0.4469899728250228\n",
      "Fold :  1\n",
      "Score 0.8964592107315551\n",
      "Fold 1 log loss 0.42423624607529403\n",
      "Fold :  2\n",
      "Score 0.8879737950397754\n",
      "Fold 2 log loss 0.4430657654748634\n",
      "Fold :  3\n",
      "Score 0.8800187178287319\n",
      "Fold 3 log loss 0.45630396639771914\n",
      "Fold :  4\n",
      "Score 0.8979566370301045\n",
      "Fold 4 log loss 0.4351545662364465\n",
      "Fold :  5\n",
      "Score 0.8824832319450944\n",
      "Fold 5 log loss 0.4534675602155028\n",
      "Fold :  6\n",
      "Score 0.8762127593199189\n",
      "Fold 6 log loss 0.46065279146871324\n",
      "Fold :  7\n",
      "Score 0.8822648572765559\n",
      "Fold 7 log loss 0.4508315765795806\n",
      "Fold :  8\n",
      "Score 0.8888472937139292\n",
      "Fold 8 log loss 0.44494612876787215\n",
      "Fold :  9\n",
      "Score 0.8913809776335901\n",
      "Fold 9 log loss 0.43956186391668595\n",
      "Training  Finish\n"
     ]
    }
   ],
   "source": [
    "pred, log_loss, oofs = fit_every_feature_model(ensemble_trains, labels, ensemble_tests, fold_count=10)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 55,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "# Stack the per-fold OOF predictions; folds are contiguous, so rows line up with `labels`.\n",
     "oofs = np.concatenate(oofs)\n",
     "# NOTE(review): to_categorical is presumably keras.utils.to_categorical imported earlier in the notebook -- confirm.\n",
     "score = np_weighted_accuracy(to_categorical(labels), oofs)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 56,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "score 0.8777104350553753\n",
      "Predicting training results...\n",
      "Predicting labeled testing results...\n"
     ]
    }
   ],
   "source": [
     "print(\"score\", score)\n",
     "# Output locations: OOF train predictions, averaged test probabilities, one-hot submission.\n",
     "oofs_dir = \"../data/p_ensemble/oofs/\"\n",
     "output_dir = \"../data/p_ensemble/preds/\"\n",
     "onehot_pred_dir = \"../data/p_ensemble/nn_one_hot/\"\n",
     "\n",
     "model_submit_prefix = \"RidgeScaled-Ensemble\"\n",
     "\n",
     "oofs_path = oofs_dir + model_submit_prefix\n",
     "output_path = output_dir + model_submit_prefix\n",
     "one_hot_pred_path = onehot_pred_dir + \"One-Hot\" + model_submit_prefix\n",
     "\n",
     "print(\"Predicting training results...\")\n",
     "# Rebinds `oofs` from ndarray to DataFrame; later cells must not expect the array form.\n",
     "oofs = pd.DataFrame({\"unrelated\": oofs[:, 0], \"agreed\": oofs[:, 1], \"disagreed\": oofs[:, 2]})\n",
     "# NOTE(review): '{:4f}' is width-4 with default 6 decimals; '{:.4f}' was likely intended.\n",
     "# NB_WORDS is defined earlier in the notebook (not visible in this section).\n",
     "submit_path = oofs_path + \"-Train-L{:4f}-NB{:d}.csv\".format(score, NB_WORDS)\n",
     "oofs.to_csv(submit_path, index=False)\n",
     "\n",
     "test_predicts = pd.DataFrame({\"unrelated\": pred[:, 0], \"agreed\": pred[:, 1], \"disagreed\": pred[:, 2]})\n",
     "submit_path = output_path + \"-L{:4f}-NB{:d}.csv\".format(score, NB_WORDS)\n",
     "test_predicts.to_csv(submit_path, index=False) # 0.3343\n",
     "\n",
     "print(\"Predicting labeled testing results...\")\n",
     "# Hard-max the class probabilities into a label column name for the one-hot submission.\n",
     "ids = pd.read_csv(\"../data/dataset/test.csv\")\n",
     "pred_labels = test_predicts.idxmax(axis=1)\n",
     "sub = pd.DataFrame({\"Id\": ids['id'].values, \"Category\": pred_labels})\n",
     "submit_path = one_hot_pred_path + \"-L{:4f}-NB{:d}.csv\".format(score, NB_WORDS)\n",
     "sub.to_csv(submit_path, index=False)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "collapsed": true
   },
   "source": [
    "# Logistic Ensembler"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 78,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "from sklearn.neighbors import KNeighborsClassifier\n",
    "from sklearn.naive_bayes import GaussianNB\n",
    "from sklearn.ensemble import RandomForestClassifier\n",
    "from sklearn.linear_model import LogisticRegression, BayesianRidge, SGDClassifier"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 58,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def get_prob(v):\n",
    "    res = []\n",
    "    for d in v:\n",
    "        d = np.exp(d) / np.sum(np.exp(d))\n",
    "        res.append(d)\n",
    "    return np.array(res)\n",
    "\n",
    "def fit_every_feature_model(feature_data, label, feature_test_data, fold_count=3, predict=True):\n",
    "    predictions = np.zeros(shape=[len(feature_test_data), 3])\n",
    "    fold_size = len(feature_data) // fold_count\n",
    "    oofs = []\n",
    "    \n",
    "    log_loss = 0\n",
    "    for fold_id in range(fold_count):\n",
    "        print(\"Fold : \", fold_id)\n",
    "        fold_start = fold_size * fold_id\n",
    "        fold_end = fold_start + fold_size\n",
    "        if fold_id == fold_count - 1:\n",
    "            fold_end = len(feature_data)\n",
    "                \n",
    "        train_x = np.concatenate([feature_data[:fold_start], feature_data[fold_end:]])\n",
    "        train_y = np.concatenate([label[:fold_start], label[fold_end:]])\n",
    "\n",
    "        val_x = feature_data[fold_start:fold_end]\n",
    "        val_y = label[fold_start:fold_end]        \n",
    "        \n",
    "        clf = LogisticRegression().fit(train_x, train_y)\n",
    "        print(\"Score\", clf.score(val_x, val_y))\n",
    "        \n",
    "        if predict:\n",
    "            prediction = clf.predict_proba(feature_test_data)\n",
    "            oof_prediction = clf.predict_proba(val_x)\n",
    "\n",
    "            score = metrics.log_loss(val_y, oof_prediction)\n",
    "            print(\"Fold\", fold_id, \"log loss\", score)\n",
    "            log_loss += score\n",
    "            oofs.append(oof_prediction)\n",
    "            predictions += prediction        \n",
    "        \n",
    "    predictions /= fold_count   \n",
    "    print(\"Training  Finish\")\n",
    "\n",
    "    return predictions, log_loss / fold_count, oofs"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 59,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Fold :  0\n",
      "Score 0.8930588051785993\n",
      "Fold 0 log loss 0.26195867040356585\n",
      "Fold :  1\n",
      "Score 0.8994228669474341\n",
      "Fold 1 log loss 0.24231697067161798\n",
      "Fold :  2\n",
      "Score 0.8911870223054126\n",
      "Fold 2 log loss 0.2643755095049082\n",
      "Fold :  3\n",
      "Score 0.8871938855092809\n",
      "Fold 3 log loss 0.2755187323987561\n",
      "Fold :  4\n",
      "Score 0.8991732958976759\n",
      "Fold 4 log loss 0.24703062814133606\n",
      "Fold :  5\n",
      "Score 0.8874434565590391\n",
      "Fold 5 log loss 0.2677488047703479\n",
      "Fold :  6\n",
      "Score 0.875214475120886\n",
      "Fold 6 log loss 0.2942973136666923\n",
      "Fold :  7\n",
      "Score 0.8889096864763687\n",
      "Fold 7 log loss 0.2674502654740498\n",
      "Fold :  8\n",
      "Score 0.8932771798471377\n",
      "Fold 8 log loss 0.2611251260650516\n",
      "Fold :  9\n",
      "Score 0.8949995320834763\n",
      "Fold 9 log loss 0.2569134369884683\n",
      "Training  Finish\n"
     ]
    }
   ],
   "source": [
    "pred, log_loss, oofs = fit_every_feature_model(ensemble_trains, labels, ensemble_tests, fold_count=10)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 60,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "# Stack the per-fold OOF predictions; folds are contiguous, so rows line up with `labels`.\n",
     "oofs = np.concatenate(oofs)\n",
     "# NOTE(review): to_categorical is presumably keras.utils.to_categorical imported earlier in the notebook -- confirm.\n",
     "score = np_weighted_accuracy(to_categorical(labels), oofs)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 61,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "score 0.8706254369175598\n",
      "Predicting training results...\n",
      "Predicting labeled testing results...\n"
     ]
    }
   ],
   "source": [
     "print(\"score\", score)\n",
     "# Output locations: OOF train predictions, averaged test probabilities, one-hot submission.\n",
     "oofs_dir = \"../data/p_ensemble/oofs/\"\n",
     "output_dir = \"../data/p_ensemble/preds/\"\n",
     "onehot_pred_dir = \"../data/p_ensemble/nn_one_hot/\"\n",
     "\n",
     "model_submit_prefix = \"Logistic-Ensemble\"\n",
     "\n",
     "oofs_path = oofs_dir + model_submit_prefix\n",
     "output_path = output_dir + model_submit_prefix\n",
     "one_hot_pred_path = onehot_pred_dir + \"One-Hot\" + model_submit_prefix\n",
     "\n",
     "print(\"Predicting training results...\")\n",
     "# Rebinds `oofs` from ndarray to DataFrame; later cells must not expect the array form.\n",
     "oofs = pd.DataFrame({\"unrelated\": oofs[:, 0], \"agreed\": oofs[:, 1], \"disagreed\": oofs[:, 2]})\n",
     "# NOTE(review): '{:4f}' is width-4 with default 6 decimals; '{:.4f}' was likely intended.\n",
     "# NB_WORDS is defined earlier in the notebook (not visible in this section).\n",
     "submit_path = oofs_path + \"-Train-L{:4f}-NB{:d}.csv\".format(score, NB_WORDS)\n",
     "oofs.to_csv(submit_path, index=False)\n",
     "\n",
     "test_predicts = pd.DataFrame({\"unrelated\": pred[:, 0], \"agreed\": pred[:, 1], \"disagreed\": pred[:, 2]})\n",
     "submit_path = output_path + \"-L{:4f}-NB{:d}.csv\".format(score, NB_WORDS)\n",
     "test_predicts.to_csv(submit_path, index=False) # 0.3343\n",
     "\n",
     "print(\"Predicting labeled testing results...\")\n",
     "# Hard-max the class probabilities into a label column name for the one-hot submission.\n",
     "ids = pd.read_csv(\"../data/dataset/test.csv\")\n",
     "pred_labels = test_predicts.idxmax(axis=1)\n",
     "sub = pd.DataFrame({\"Id\": ids['id'].values, \"Category\": pred_labels})\n",
     "submit_path = one_hot_pred_path + \"-L{:4f}-NB{:d}.csv\".format(score, NB_WORDS)\n",
     "sub.to_csv(submit_path, index=False)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "collapsed": true
   },
   "source": [
    "## Scaled"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 62,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def get_prob(v):\n",
    "    res = []\n",
    "    for d in v:\n",
    "        d = np.exp(d) / np.sum(np.exp(d))\n",
    "        res.append(d)\n",
    "    return np.array(res)\n",
    "\n",
    "def fit_every_feature_model(feature_data, label, feature_test_data, fold_count=3, predict=True):\n",
    "    predictions = np.zeros(shape=[len(feature_test_data), 3])\n",
    "    fold_size = len(feature_data) // fold_count\n",
    "    oofs = []\n",
    "    \n",
    "    log_loss = 0\n",
    "    for fold_id in range(fold_count):\n",
    "        print(\"Fold : \", fold_id)\n",
    "        fold_start = fold_size * fold_id\n",
    "        fold_end = fold_start + fold_size\n",
    "        if fold_id == fold_count - 1:\n",
    "            fold_end = len(feature_data)\n",
    "                \n",
    "        train_x = np.concatenate([feature_data[:fold_start], feature_data[fold_end:]])\n",
    "        train_y = np.concatenate([label[:fold_start], label[fold_end:]])\n",
    "\n",
    "        val_x = feature_data[fold_start:fold_end]\n",
    "        val_y = label[fold_start:fold_end]        \n",
    "        \n",
    "        clf = LogisticRegression(class_weight={0: 1/16, 1:1/15, 2: 1/5}).fit(train_x, train_y)\n",
    "        print(\"Score\", clf.score(val_x, val_y))\n",
    "        \n",
    "        if predict:\n",
    "            prediction = clf.predict_proba(feature_test_data)\n",
    "            oof_prediction = clf.predict_proba(val_x)\n",
    "\n",
    "            score = metrics.log_loss(val_y, oof_prediction)\n",
    "            print(\"Fold\", fold_id, \"log loss\", score)\n",
    "            log_loss += score\n",
    "            oofs.append(oof_prediction)\n",
    "            predictions += prediction        \n",
    "        \n",
    "    predictions /= fold_count   \n",
    "    print(\"Training  Finish\")\n",
    "\n",
    "    return predictions, log_loss / fold_count, oofs"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 63,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Fold :  0\n",
      "Score 0.8905007019185774\n",
      "Fold 0 log loss 0.3387896340363898\n",
      "Fold :  1\n",
      "Score 0.8987053501793791\n",
      "Fold 1 log loss 0.3108030978317047\n",
      "Fold :  2\n",
      "Score 0.8901575417251599\n",
      "Fold 2 log loss 0.3331646136281357\n",
      "Fold :  3\n",
      "Score 0.8842614256746217\n",
      "Fold 3 log loss 0.345689462857064\n",
      "Fold :  4\n",
      "Score 0.8987989393230386\n",
      "Fold 4 log loss 0.30834979744613983\n",
      "Fold :  5\n",
      "Score 0.8858212447356106\n",
      "Fold 5 log loss 0.33644688432046016\n",
      "Fold :  6\n",
      "Score 0.8763063484635782\n",
      "Fold 6 log loss 0.36819509793620286\n",
      "Fold :  7\n",
      "Score 0.8869131180783029\n",
      "Fold 7 log loss 0.3377457218367191\n",
      "Fold :  8\n",
      "Score 0.8923100920293247\n",
      "Fold 8 log loss 0.3276271239778752\n",
      "Fold :  9\n",
      "Score 0.8944692266899585\n",
      "Fold 9 log loss 0.3214368732406488\n",
      "Training  Finish\n"
     ]
    }
   ],
   "source": [
    "pred, log_loss, oofs = fit_every_feature_model(ensemble_trains, labels, ensemble_tests, fold_count=10)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 64,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "# Stack the per-fold OOF predictions; folds are contiguous, so rows line up with `labels`.\n",
     "oofs = np.concatenate(oofs)\n",
     "# NOTE(review): to_categorical is presumably keras.utils.to_categorical imported earlier in the notebook -- confirm.\n",
     "score = np_weighted_accuracy(to_categorical(labels), oofs)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 65,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "score 0.8750042278447884\n",
      "Predicting training results...\n",
      "Predicting labeled testing results...\n"
     ]
    }
   ],
   "source": [
     "print(\"score\", score)\n",
     "# Output locations: OOF train predictions, averaged test probabilities, one-hot submission.\n",
     "oofs_dir = \"../data/p_ensemble/oofs/\"\n",
     "output_dir = \"../data/p_ensemble/preds/\"\n",
     "onehot_pred_dir = \"../data/p_ensemble/nn_one_hot/\"\n",
     "\n",
     "model_submit_prefix = \"LogisticScaled-Ensemble\"\n",
     "\n",
     "oofs_path = oofs_dir + model_submit_prefix\n",
     "output_path = output_dir + model_submit_prefix\n",
     "one_hot_pred_path = onehot_pred_dir + \"One-Hot\" + model_submit_prefix\n",
     "\n",
     "print(\"Predicting training results...\")\n",
     "# Rebinds `oofs` from ndarray to DataFrame; later cells must not expect the array form.\n",
     "oofs = pd.DataFrame({\"unrelated\": oofs[:, 0], \"agreed\": oofs[:, 1], \"disagreed\": oofs[:, 2]})\n",
     "# NOTE(review): '{:4f}' is width-4 with default 6 decimals; '{:.4f}' was likely intended.\n",
     "# NB_WORDS is defined earlier in the notebook (not visible in this section).\n",
     "submit_path = oofs_path + \"-Train-L{:4f}-NB{:d}.csv\".format(score, NB_WORDS)\n",
     "oofs.to_csv(submit_path, index=False)\n",
     "\n",
     "test_predicts = pd.DataFrame({\"unrelated\": pred[:, 0], \"agreed\": pred[:, 1], \"disagreed\": pred[:, 2]})\n",
     "submit_path = output_path + \"-L{:4f}-NB{:d}.csv\".format(score, NB_WORDS)\n",
     "test_predicts.to_csv(submit_path, index=False) # 0.3343\n",
     "\n",
     "print(\"Predicting labeled testing results...\")\n",
     "# Hard-max the class probabilities into a label column name for the one-hot submission.\n",
     "ids = pd.read_csv(\"../data/dataset/test.csv\")\n",
     "pred_labels = test_predicts.idxmax(axis=1)\n",
     "sub = pd.DataFrame({\"Id\": ids['id'].values, \"Category\": pred_labels})\n",
     "submit_path = one_hot_pred_path + \"-L{:4f}-NB{:d}.csv\".format(score, NB_WORDS)\n",
     "sub.to_csv(submit_path, index=False)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.3"
  },
  "widgets": {
   "application/vnd.jupyter.widget-state+json": {
    "state": {},
    "version_major": 1,
    "version_minor": 0
   }
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
