{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [
    {
     "ename": "IndexError",
     "evalue": "list assignment index out of range",
     "output_type": "error",
     "traceback": [
      "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[1;31mIndexError\u001b[0m                                Traceback (most recent call last)",
      "\u001b[1;32m<ipython-input-8-83bbe24212c4>\u001b[0m in \u001b[0;36m<module>\u001b[1;34m\u001b[0m\n\u001b[0;32m    188\u001b[0m \u001b[1;32mwith\u001b[0m \u001b[0mopen\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m'train.csv'\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;34m'r'\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;32mas\u001b[0m \u001b[0mcsvfile\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    189\u001b[0m     \u001b[0mData\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mcsv\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mreader\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mcsvfile\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mdelimiter\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;34m'#'\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 190\u001b[1;33m     \u001b[0mX_Data\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mParse\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mData\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;36m1\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m    191\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    192\u001b[0m \u001b[1;32mwith\u001b[0m \u001b[0mopen\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m'test_without_label.csv'\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;34m'r'\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;32mas\u001b[0m \u001b[0mcsvfile\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32m<ipython-input-8-83bbe24212c4>\u001b[0m in \u001b[0;36mParse\u001b[1;34m(Data, Is_train)\u001b[0m\n\u001b[0;32m    154\u001b[0m             \u001b[0moffset\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mDict_attr\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0mi\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;36m0\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m]\u001b[0m \u001b[1;33m*\u001b[0m \u001b[1;36m13\u001b[0m\u001b[1;33m;\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    155\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 156\u001b[1;33m             \u001b[0mCur_vec\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0mpos\u001b[0m \u001b[1;33m+\u001b[0m \u001b[0moffset\u001b[0m \u001b[1;33m+\u001b[0m \u001b[0mDict_attr\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0mi\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;36m0\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m]\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;36m1\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m    157\u001b[0m             \u001b[0mCur_vec\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0mpos\u001b[0m \u001b[1;33m+\u001b[0m \u001b[0moffset\u001b[0m \u001b[1;33m+\u001b[0m \u001b[1;36m9\u001b[0m \u001b[1;33m+\u001b[0m \u001b[0mDict_comp\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0mi\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;36m1\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m]\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;36m1\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    158\u001b[0m             \u001b[0mtable_name\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;33m(\u001b[0m\u001b[0mi\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;36m0\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0msplit\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m'.'\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;36m0\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;31mIndexError\u001b[0m: list assignment index out of range"
     ]
    }
   ],
   "source": [
    "import csv\n",
    "import math\n",
    "# from sklearn import ensemble\n",
    "# from sklearn import datasets\n",
    "# from sklearn.utils import shuffle\n",
    "# from sklearn.metrics import mean_squared_error\n",
    "import numpy as np\n",
    "import pandas as pd\n",
    "import matplotlib.pyplot as plt\n",
    "from sklearn.model_selection import train_test_split\n",
    "from sklearn.neural_network import MLPRegressor\n",
    "from sklearn.neural_network import MLPClassifier\n",
    "from sklearn.metrics import accuracy_score\n",
    "from sklearn.tree import DecisionTreeClassifier\n",
    "from sklearn.ensemble import BaggingClassifier\n",
    "from sklearn.ensemble import RandomForestClassifier    \n",
    "from sklearn.ensemble import AdaBoostClassifier\n",
    "from sklearn.ensemble import GradientBoostingClassifier\n",
    "from sklearn.ensemble import GradientBoostingRegressor\n",
    "from sklearn.model_selection import KFold\n",
    "# Import GridSearchCV for hyper-parameter search\n",
    "from sklearn.model_selection import GridSearchCV\n",
    "# Use a random forest as the model\n",
    "\n",
    "'''\n",
    "movie_id\n",
    "keyword_id\n",
    "id\n",
    "info_type_id\n",
    "role_id\n",
    "person_id\n",
    "company_type_id\n",
    "company_id\n",
    "production_year\n",
    "kind_id\n",
    "'''\n",
    "Cnt = 0;\n",
    "\n",
    "# Lookup tables consumed by Parse() below when building one-hot features.\n",
    "# Dict_table: table alias -> position inside a 6-wide one-hot group.\n",
    "Dict_table = {}\n",
    "# Num_table: table alias -> reciprocal of a per-table count (filled from\n",
    "# column_min_max_vals.csv further down).\n",
    "Num_table = {}\n",
    "\n",
    "Dict_table['t'] = 0\n",
    "Dict_table['mc'] = 1\n",
    "Dict_table['ci'] = 2\n",
    "Dict_table['mi'] = 3\n",
    "Dict_table['mi_idx'] = 4\n",
    "Dict_table['mk'] = 5\n",
    "\n",
    "# Predicate attribute -> index within the 9-wide attribute one-hot.\n",
    "Dict_attr = {}\n",
    "Dict_attr['t.production_year'] = 0\n",
    "Dict_attr['mi.info_type_id'] = 1\n",
    "Dict_attr['t.kind_id'] = 2\n",
    "Dict_attr['mi_idx.info_type_id'] = 3\n",
    "Dict_attr['mk.keyword_id'] = 4\n",
    "Dict_attr['ci.person_id'] = 5\n",
    "Dict_attr['mc.company_id'] = 6\n",
    "Dict_attr['mc.company_type_id'] = 7\n",
    "Dict_attr['ci.role_id'] = 8\n",
    "\n",
    "# Join predicate -> index within the 6-wide join one-hot\n",
    "# ('' covers rows whose join field is empty).\n",
    "Dict_join = {}\n",
    "Dict_join[''] = 0\n",
    "Dict_join['t.id=mc.movie_id'] = 1\n",
    "Dict_join['t.id=mi.movie_id'] = 2\n",
    "Dict_join['t.id=mi_idx.movie_id'] = 3\n",
    "Dict_join['t.id=ci.movie_id'] = 4\n",
    "Dict_join['t.id=mk.movie_id'] = 5\n",
    "\n",
    "\n",
    "# t.production_year\n",
    "# mi.info_type_id\n",
    "# t.kind_id\n",
    "# mi_idx.info_type_id\n",
    "# mk.keyword_id\n",
    "# ci.person_id\n",
    "# mc.company_id\n",
    "# mc.company_type_id\n",
    "# ci.role_id\n",
    "\n",
    "# Comparison operator -> index within the 3-wide comparator one-hot.\n",
    "Dict_comp = {}\n",
    "Dict_comp['>'] = 0\n",
    "Dict_comp['='] = 1\n",
    "Dict_comp['<'] = 2\n",
    "\n",
    "# Per-column min/max literals (stored as strings; Parse() int()-converts\n",
    "# them when normalizing condition values).\n",
    "Dict_max = {}\n",
    "Dict_min = {}\n",
    "\n",
    "# Row layout: name, min, max, count — the header row ('name') is skipped.\n",
    "with open('column_min_max_vals.csv', 'r') as csvfile:\n",
    "    Max_list = csv.reader(csvfile)\n",
    "    for Max_data in Max_list:\n",
    "        if (Max_data[0] == \"name\"):\n",
    "            continue\n",
    "        Dict_max[Max_data[0]] = Max_data[2]\n",
    "        Dict_min[Max_data[0]] = Max_data[1]\n",
    "        # Reciprocal of column 3 — presumably the table's row count.\n",
    "        # NOTE(review): relies on Python 3 true division; Python 2 would\n",
    "        # floor 1/int(...) to 0.\n",
    "        Num_table[Max_data[0].split('.')[0]] = 1/int(Max_data[3])\n",
    "\n",
    "# def Getkey(element):\n",
    "#     return Dict_table[element[0].split('.')[0]];\n",
    "# # data load and feature abstract\n",
    "\n",
    "def Parse(Data, Is_train):\n",
    "    \"\"\"Turn CSV query rows into fixed-length feature vectors.\n",
    "\n",
    "    Each row of Data looks like [tables, joins, conditions(, cardinality)]\n",
    "    with comma-separated fields; conditions are a flat sequence of\n",
    "    (attribute, comparator, value) triples.  Relies on the module-level\n",
    "    lookup tables Dict_table, Dict_join, Dict_attr, Dict_comp,\n",
    "    Dict_min and Dict_max.\n",
    "\n",
    "    Is_train == 1 means column 3 carries the true cardinality; its log is\n",
    "    collected into the second returned list (left empty otherwise).\n",
    "    Returns [Feature_vec, Feature_target].\n",
    "    \"\"\"\n",
    "    Feature_vec = []\n",
    "    Feature_target = []\n",
    "\n",
    "    for Sql in Data:\n",
    "        Table_name = Sql[0].split(',')\n",
    "        Join_table = Sql[1].split(',')\n",
    "\n",
    "        # Regroup the flat condition field into triples.  Stepping with\n",
    "        # range(0, len - 2, 3) also handles an empty condition field\n",
    "        # (''.split(',') == ['']) and a length that is not a multiple of\n",
    "        # 3, which crashed the old index arithmetic with IndexError.\n",
    "        Cur_cond = Sql[2].split(',')\n",
    "        Condition = [Cur_cond[k:k + 3] for k in range(0, len(Cur_cond) - 2, 3)]\n",
    "\n",
    "        Target = int(Sql[3]) if Is_train == 1 else 0\n",
    "\n",
    "        # Feature layout (108 slots):\n",
    "        #   [0, 18)   up to 3 tables     x 6-wide one-hot table id\n",
    "        #   [18, 30)  up to 2 joins      x 6-wide one-hot join id\n",
    "        #   [30, 108) up to 6 conditions x (9 attr + 3 comp + 1 value)\n",
    "        Cur_vec = [0.0] * (18 + 12 + 6 * (9 + 3 + 1))\n",
    "\n",
    "        pos = 0\n",
    "        for Cur_table_name in Table_name:\n",
    "            # A table may be written 'title t'; the alias is the last token.\n",
    "            table_name = Cur_table_name.split(' ')[-1]\n",
    "            Cur_vec[pos + Dict_table[table_name]] = 1\n",
    "            pos += 6\n",
    "\n",
    "        pos = 18\n",
    "        for Cur_join in Join_table:\n",
    "            Cur_vec[pos + Dict_join[Cur_join]] = 1\n",
    "            pos += 6\n",
    "\n",
    "        pos = 30\n",
    "        for i in Condition:\n",
    "            Cur_vec[pos + Dict_attr[i[0]]] = 1\n",
    "            Cur_vec[pos + 9 + Dict_comp[i[1]]] = 1\n",
    "            # Min-max normalize the literal to [0, 1].\n",
    "            # NOTE(review): assumes max > min for every attribute; equal\n",
    "            # bounds would divide by zero (true of the old code as well).\n",
    "            lo = int(Dict_min[i[0]])\n",
    "            hi = int(Dict_max[i[0]])\n",
    "            Cur_vec[pos + 12] = (1.0 * int(i[2]) - lo) / (hi - lo)\n",
    "            pos += 13\n",
    "\n",
    "        Feature_vec.append(Cur_vec)\n",
    "\n",
    "        if Is_train == 1:\n",
    "            # The model is trained to predict log(cardinality).\n",
    "            Feature_target.append(math.log(1.0 * Target))\n",
    "\n",
    "    return [Feature_vec, Feature_target]\n",
    "\n",
    "# Parsed containers: X_Data = [vectors, log-targets] for the training\n",
    "# set, Y_Data likewise for the unlabeled test set.\n",
    "X_Data = []\n",
    "Y_Data = []\n",
    "\n",
    "# Rows are '#'-delimited: tables # joins # conditions # cardinality.\n",
    "with open('train.csv', 'r') as csvfile:\n",
    "    Data = csv.reader(csvfile, delimiter = '#')\n",
    "    X_Data = Parse(Data, 1)\n",
    "\n",
    "# The test file carries no label column, hence Is_train = 0.\n",
    "with open('test_without_label.csv', 'r') as csvfile:\n",
    "    Data = csv.reader(csvfile, delimiter = '#')\n",
    "    Y_Data = Parse(Data, 0)\n",
    "\n",
    "X_train = X_Data[0]\n",
    "Y_train = X_Data[1]  # log(cardinality) targets from Parse\n",
    "#Y_train = [x/max(X_Data[1]) for x in Y_train]\n",
    "\n",
    "X_test = Y_Data[0]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iteration 1, loss = 16.57771546\n",
      "Iteration 2, loss = 3.33537337\n",
      "Iteration 3, loss = 2.30793311\n",
      "Iteration 4, loss = 1.92571748\n",
      "Iteration 5, loss = 1.74530393\n",
      "Iteration 6, loss = 1.65557261\n",
      "Iteration 7, loss = 1.60038608\n",
      "Iteration 8, loss = 1.55810199\n",
      "Iteration 9, loss = 1.53183663\n",
      "Iteration 10, loss = 1.50515683\n",
      "Iteration 11, loss = 1.48472133\n",
      "Iteration 12, loss = 1.45986949\n",
      "Iteration 13, loss = 1.44069431\n",
      "Iteration 14, loss = 1.42522419\n",
      "Iteration 15, loss = 1.41352155\n",
      "Iteration 16, loss = 1.38935468\n",
      "Iteration 17, loss = 1.37529960\n",
      "Iteration 18, loss = 1.35561279\n",
      "Iteration 19, loss = 1.33264083\n",
      "Iteration 20, loss = 1.32282446\n",
      "Iteration 21, loss = 1.29691260\n",
      "Iteration 22, loss = 1.27881363\n",
      "Iteration 23, loss = 1.26066405\n",
      "Iteration 24, loss = 1.24425417\n",
      "Iteration 25, loss = 1.22232587\n",
      "Iteration 26, loss = 1.20214331\n",
      "Iteration 27, loss = 1.18004771\n",
      "Iteration 28, loss = 1.16064340\n",
      "Iteration 29, loss = 1.14231945\n",
      "Iteration 30, loss = 1.12447120\n",
      "Iteration 31, loss = 1.10633566\n",
      "Iteration 32, loss = 1.08754626\n",
      "Iteration 33, loss = 1.06673622\n",
      "Iteration 34, loss = 1.05163627\n",
      "Iteration 35, loss = 1.03779133\n",
      "Iteration 36, loss = 1.02236112\n",
      "Iteration 37, loss = 1.01111201\n",
      "Iteration 38, loss = 1.00730413\n",
      "Iteration 39, loss = 0.98145210\n",
      "Iteration 40, loss = 0.97064328\n",
      "Iteration 41, loss = 0.95646332\n",
      "Iteration 42, loss = 0.94679259\n",
      "Iteration 43, loss = 0.93834308\n",
      "Iteration 44, loss = 0.92533920\n",
      "Iteration 45, loss = 0.91835889\n",
      "Iteration 46, loss = 0.91367262\n",
      "Iteration 47, loss = 0.90388902\n",
      "Iteration 48, loss = 0.89872809\n",
      "Iteration 49, loss = 0.89153249\n",
      "Iteration 50, loss = 0.88097594\n",
      "Iteration 51, loss = 0.87784687\n",
      "Iteration 52, loss = 0.86998099\n",
      "Iteration 53, loss = 0.86254946\n",
      "Iteration 54, loss = 0.85970412\n",
      "Iteration 55, loss = 0.85381660\n",
      "Iteration 56, loss = 0.84788987\n",
      "Iteration 57, loss = 0.84211989\n",
      "Iteration 58, loss = 0.83876337\n",
      "Iteration 59, loss = 0.83378126\n",
      "Iteration 60, loss = 0.83026116\n",
      "Iteration 61, loss = 0.82461797\n",
      "Iteration 62, loss = 0.81726198\n",
      "Iteration 63, loss = 0.81036052\n",
      "Iteration 64, loss = 0.81905526\n",
      "Iteration 65, loss = 0.81495744\n",
      "Iteration 66, loss = 0.79755096\n",
      "Iteration 67, loss = 0.79215185\n",
      "Iteration 68, loss = 0.78879601\n",
      "Iteration 69, loss = 0.78575581\n",
      "Iteration 70, loss = 0.78041707\n",
      "Iteration 71, loss = 0.77539742\n",
      "Iteration 72, loss = 0.76939047\n",
      "Iteration 73, loss = 0.76990635\n",
      "Iteration 74, loss = 0.76576784\n",
      "Iteration 75, loss = 0.76251278\n",
      "Iteration 76, loss = 0.75942520\n",
      "Iteration 77, loss = 0.75847758\n",
      "Iteration 78, loss = 0.76189659\n",
      "Iteration 79, loss = 0.74552814\n",
      "Iteration 80, loss = 0.74621283\n",
      "Iteration 81, loss = 0.74236417\n",
      "Iteration 82, loss = 0.74431693\n",
      "Iteration 83, loss = 0.74019816\n",
      "Iteration 84, loss = 0.73034074\n",
      "Iteration 85, loss = 0.73207247\n",
      "Iteration 86, loss = 0.72481972\n",
      "Iteration 87, loss = 0.72478564\n",
      "Iteration 88, loss = 0.72815502\n",
      "Iteration 89, loss = 0.71820935\n",
      "Iteration 90, loss = 0.70964964\n",
      "Iteration 91, loss = 0.71051430\n",
      "Iteration 92, loss = 0.70809818\n",
      "Iteration 93, loss = 0.70167968\n",
      "Iteration 94, loss = 0.70272808\n",
      "Iteration 95, loss = 0.69852340\n",
      "Iteration 96, loss = 0.69133160\n",
      "Iteration 97, loss = 0.70631242\n",
      "Iteration 98, loss = 0.70760890\n",
      "Iteration 99, loss = 0.71938344\n",
      "Iteration 100, loss = 0.68974971\n",
      "Iteration 101, loss = 0.68474133\n",
      "Iteration 102, loss = 0.68482628\n",
      "Iteration 103, loss = 0.68161551\n",
      "Iteration 104, loss = 0.67456323\n",
      "Iteration 105, loss = 0.67755583\n",
      "Iteration 106, loss = 0.68510178\n",
      "Iteration 107, loss = 0.67237079\n",
      "Iteration 108, loss = 0.66230468\n",
      "Iteration 109, loss = 0.66419170\n",
      "Iteration 110, loss = 0.67594766\n",
      "Iteration 111, loss = 0.67070132\n",
      "Iteration 112, loss = 0.66511569\n",
      "Iteration 113, loss = 0.66964789\n",
      "Iteration 114, loss = 0.65747087\n",
      "Iteration 115, loss = 0.65952459\n",
      "Iteration 116, loss = 0.65003351\n",
      "Iteration 117, loss = 0.64855620\n",
      "Iteration 118, loss = 0.64469131\n",
      "Iteration 119, loss = 0.64019633\n",
      "Iteration 120, loss = 0.64526651\n",
      "Iteration 121, loss = 0.65048310\n",
      "Iteration 122, loss = 0.64120108\n",
      "Iteration 123, loss = 0.63831911\n",
      "Iteration 124, loss = 0.63607461\n",
      "Iteration 125, loss = 0.63453191\n",
      "Iteration 126, loss = 0.62757662\n",
      "Iteration 127, loss = 0.62793896\n",
      "Iteration 128, loss = 0.62710190\n",
      "Iteration 129, loss = 0.62356501\n",
      "Iteration 130, loss = 0.62296554\n",
      "Iteration 131, loss = 0.61769753\n",
      "Iteration 132, loss = 0.62083784\n",
      "Iteration 133, loss = 0.61820828\n",
      "Iteration 134, loss = 0.61529158\n",
      "Iteration 135, loss = 0.61573537\n",
      "Iteration 136, loss = 0.61381101\n",
      "Iteration 137, loss = 0.61065211\n",
      "Iteration 138, loss = 0.61151867\n",
      "Iteration 139, loss = 0.60696332\n",
      "Iteration 140, loss = 0.60099417\n",
      "Iteration 141, loss = 0.60113885\n",
      "Iteration 142, loss = 0.60037316\n",
      "Iteration 143, loss = 0.59892061\n",
      "Iteration 144, loss = 0.60551071\n",
      "Iteration 145, loss = 0.59195446\n",
      "Iteration 146, loss = 0.58862751\n",
      "Iteration 147, loss = 0.59402369\n",
      "Iteration 148, loss = 0.58877301\n",
      "Iteration 149, loss = 0.58375578\n",
      "Iteration 150, loss = 0.58578168\n",
      "Iteration 151, loss = 0.58495514\n",
      "Iteration 152, loss = 0.58399892\n",
      "Iteration 153, loss = 0.58562967\n",
      "Iteration 154, loss = 0.57752877\n",
      "Iteration 155, loss = 0.57504974\n",
      "Iteration 156, loss = 0.57513537\n",
      "Iteration 157, loss = 0.57219417\n",
      "Iteration 158, loss = 0.57196069\n",
      "Iteration 159, loss = 0.57940005\n",
      "Iteration 160, loss = 0.57011415\n",
      "Iteration 161, loss = 0.56516296\n",
      "Iteration 162, loss = 0.56952342\n",
      "Iteration 163, loss = 0.56513377\n",
      "Iteration 164, loss = 0.56746975\n",
      "Iteration 165, loss = 0.57315349\n",
      "Iteration 166, loss = 0.57418262\n",
      "Iteration 167, loss = 0.56308706\n",
      "Iteration 168, loss = 0.55972496\n",
      "Iteration 169, loss = 0.55375893\n",
      "Iteration 170, loss = 0.55776593\n",
      "Iteration 171, loss = 0.55455653\n",
      "Iteration 172, loss = 0.55541299\n",
      "Iteration 173, loss = 0.55431598\n",
      "Iteration 174, loss = 0.55050102\n",
      "Iteration 175, loss = 0.55279365\n",
      "Iteration 176, loss = 0.54657683\n",
      "Iteration 177, loss = 0.54778109\n",
      "Iteration 178, loss = 0.54033471\n",
      "Iteration 179, loss = 0.54019737\n",
      "Iteration 180, loss = 0.53691827\n",
      "Iteration 181, loss = 0.54356893\n",
      "Iteration 182, loss = 0.53644603\n",
      "Iteration 183, loss = 0.53799658\n",
      "Iteration 184, loss = 0.54269042\n",
      "Iteration 185, loss = 0.53195599\n",
      "Iteration 186, loss = 0.53796593\n",
      "Iteration 187, loss = 0.53457105\n",
      "Iteration 188, loss = 0.55490190\n",
      "Iteration 189, loss = 0.52881485\n",
      "Iteration 190, loss = 0.52538719\n",
      "Iteration 191, loss = 0.52967350\n",
      "Iteration 192, loss = 0.52967178\n",
      "Iteration 193, loss = 0.52396021\n",
      "Iteration 194, loss = 0.53515891\n",
      "Iteration 195, loss = 0.52315693\n",
      "Iteration 196, loss = 0.51670987\n",
      "Iteration 197, loss = 0.51951451\n",
      "Iteration 198, loss = 0.51596317\n",
      "Iteration 199, loss = 0.52294166\n",
      "Iteration 200, loss = 0.51119591\n",
      "Iteration 201, loss = 0.51657407\n",
      "Iteration 202, loss = 0.50992989\n",
      "Iteration 203, loss = 0.51559682\n",
      "Iteration 204, loss = 0.51922461\n",
      "Iteration 205, loss = 0.51240808\n",
      "Iteration 206, loss = 0.50757604\n",
      "Iteration 207, loss = 0.50606724\n",
      "Iteration 208, loss = 0.50732873\n",
      "Iteration 209, loss = 0.50025562\n",
      "Iteration 210, loss = 0.50280393\n",
      "Iteration 211, loss = 0.49921600\n",
      "Iteration 212, loss = 0.50418445\n",
      "Iteration 213, loss = 0.50173936\n",
      "Iteration 214, loss = 0.49961135\n",
      "Iteration 215, loss = 0.49328053\n",
      "Iteration 216, loss = 0.49937286\n",
      "Iteration 217, loss = 0.49761698\n",
      "Iteration 218, loss = 0.49419884\n",
      "Iteration 219, loss = 0.49107286\n",
      "Iteration 220, loss = 0.49212234\n",
      "Iteration 221, loss = 0.49501668\n",
      "Iteration 222, loss = 0.49417634\n",
      "Iteration 223, loss = 0.50482177\n",
      "Iteration 224, loss = 0.48179778\n",
      "Iteration 225, loss = 0.49307324\n",
      "Iteration 226, loss = 0.48701597\n",
      "Iteration 227, loss = 0.51212469\n",
      "Iteration 228, loss = 0.48143615\n",
      "Iteration 229, loss = 0.48788652\n",
      "Iteration 230, loss = 0.48818966\n",
      "Iteration 231, loss = 0.48480217\n",
      "Iteration 232, loss = 0.47617923\n",
      "Iteration 233, loss = 0.47722459\n",
      "Iteration 234, loss = 0.48037374\n",
      "Iteration 235, loss = 0.48116671\n",
      "Iteration 236, loss = 0.47459159\n",
      "Iteration 237, loss = 0.47843851\n",
      "Iteration 238, loss = 0.46841684\n",
      "Iteration 239, loss = 0.47527428\n",
      "Iteration 240, loss = 0.47176468\n",
      "Iteration 241, loss = 0.46872388\n",
      "Iteration 242, loss = 0.47074266\n",
      "Iteration 243, loss = 0.47751494\n",
      "Iteration 244, loss = 0.46596599\n",
      "Iteration 245, loss = 0.46252999\n",
      "Iteration 246, loss = 0.46828691\n",
      "Iteration 247, loss = 0.46773195\n",
      "Iteration 248, loss = 0.46299070\n",
      "Iteration 249, loss = 0.46730840\n",
      "Iteration 250, loss = 0.46237116\n",
      "Iteration 251, loss = 0.45852627\n",
      "Iteration 252, loss = 0.45588567\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iteration 253, loss = 0.46388753\n",
      "Iteration 254, loss = 0.46265959\n",
      "Iteration 255, loss = 0.45814344\n",
      "Iteration 256, loss = 0.45974069\n",
      "Iteration 257, loss = 0.45340569\n",
      "Iteration 258, loss = 0.45457994\n",
      "Iteration 259, loss = 0.46932633\n",
      "Iteration 260, loss = 0.45158307\n",
      "Iteration 261, loss = 0.45006190\n",
      "Iteration 262, loss = 0.45476980\n",
      "Iteration 263, loss = 0.44927908\n",
      "Iteration 264, loss = 0.44940764\n",
      "Iteration 265, loss = 0.45088580\n",
      "Iteration 266, loss = 0.44807611\n",
      "Iteration 267, loss = 0.44340418\n",
      "Iteration 268, loss = 0.44457419\n",
      "Iteration 269, loss = 0.44857611\n",
      "Iteration 270, loss = 0.44191644\n",
      "Iteration 271, loss = 0.45394343\n",
      "Iteration 272, loss = 0.45128550\n",
      "Iteration 273, loss = 0.44046518\n",
      "Iteration 274, loss = 0.43816653\n",
      "Iteration 275, loss = 0.44260159\n",
      "Iteration 276, loss = 0.43665097\n",
      "Iteration 277, loss = 0.44387278\n",
      "Iteration 278, loss = 0.43650561\n",
      "Iteration 279, loss = 0.43759983\n",
      "Iteration 280, loss = 0.44192683\n",
      "Iteration 281, loss = 0.44361254\n",
      "Iteration 282, loss = 0.43590559\n",
      "Iteration 283, loss = 0.42966807\n",
      "Iteration 284, loss = 0.44022968\n",
      "Iteration 285, loss = 0.43273310\n",
      "Iteration 286, loss = 0.44501470\n",
      "Iteration 287, loss = 0.42708885\n",
      "Iteration 288, loss = 0.42770477\n",
      "Iteration 289, loss = 0.43688596\n",
      "Iteration 290, loss = 0.42917351\n",
      "Iteration 291, loss = 0.43012960\n",
      "Iteration 292, loss = 0.42809386\n",
      "Iteration 293, loss = 0.43056202\n",
      "Iteration 294, loss = 0.43549237\n",
      "Iteration 295, loss = 0.42990073\n",
      "Iteration 296, loss = 0.43050212\n",
      "Iteration 297, loss = 0.42901843\n",
      "Iteration 298, loss = 0.42741307\n",
      "Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
      "2.3958999147713387\n",
      "Iteration 1, loss = 16.63890588\n",
      "Iteration 2, loss = 3.36257026\n",
      "Iteration 3, loss = 2.33521411\n",
      "Iteration 4, loss = 1.96440511\n",
      "Iteration 5, loss = 1.79108983\n",
      "Iteration 6, loss = 1.69908364\n",
      "Iteration 7, loss = 1.64287709\n",
      "Iteration 8, loss = 1.60208415\n",
      "Iteration 9, loss = 1.57295936\n",
      "Iteration 10, loss = 1.54609256\n",
      "Iteration 11, loss = 1.52829073\n",
      "Iteration 12, loss = 1.50225504\n",
      "Iteration 13, loss = 1.48459711\n",
      "Iteration 14, loss = 1.47048323\n",
      "Iteration 15, loss = 1.45338468\n",
      "Iteration 16, loss = 1.42990109\n",
      "Iteration 17, loss = 1.41459658\n",
      "Iteration 18, loss = 1.39432381\n",
      "Iteration 19, loss = 1.37016066\n",
      "Iteration 20, loss = 1.36063379\n",
      "Iteration 21, loss = 1.33171592\n",
      "Iteration 22, loss = 1.31516410\n",
      "Iteration 23, loss = 1.29546570\n",
      "Iteration 24, loss = 1.27830024\n",
      "Iteration 25, loss = 1.25340102\n",
      "Iteration 26, loss = 1.23314014\n",
      "Iteration 27, loss = 1.21541293\n",
      "Iteration 28, loss = 1.19269777\n",
      "Iteration 29, loss = 1.17731651\n",
      "Iteration 30, loss = 1.15881815\n",
      "Iteration 31, loss = 1.14323165\n",
      "Iteration 32, loss = 1.12420082\n",
      "Iteration 33, loss = 1.10792762\n",
      "Iteration 34, loss = 1.08824277\n",
      "Iteration 35, loss = 1.07380318\n",
      "Iteration 36, loss = 1.06015719\n",
      "Iteration 37, loss = 1.04657737\n",
      "Iteration 38, loss = 1.03839467\n",
      "Iteration 39, loss = 1.01715540\n",
      "Iteration 40, loss = 1.00471273\n",
      "Iteration 41, loss = 0.99500658\n",
      "Iteration 42, loss = 0.97697275\n",
      "Iteration 43, loss = 0.97363328\n",
      "Iteration 44, loss = 0.95943997\n",
      "Iteration 45, loss = 0.95069034\n",
      "Iteration 46, loss = 0.94937051\n",
      "Iteration 47, loss = 0.93751063\n",
      "Iteration 48, loss = 0.93158776\n",
      "Iteration 49, loss = 0.92863616\n",
      "Iteration 50, loss = 0.91932520\n",
      "Iteration 51, loss = 0.91327931\n",
      "Iteration 52, loss = 0.89994399\n",
      "Iteration 53, loss = 0.89736113\n",
      "Iteration 54, loss = 0.89139064\n",
      "Iteration 55, loss = 0.88754516\n",
      "Iteration 56, loss = 0.87976705\n",
      "Iteration 57, loss = 0.87544229\n",
      "Iteration 58, loss = 0.87311705\n",
      "Iteration 59, loss = 0.86794666\n",
      "Iteration 60, loss = 0.86274333\n",
      "Iteration 61, loss = 0.85686346\n",
      "Iteration 62, loss = 0.85256633\n",
      "Iteration 63, loss = 0.84568026\n",
      "Iteration 64, loss = 0.84831144\n",
      "Iteration 65, loss = 0.84437720\n",
      "Iteration 66, loss = 0.83520119\n",
      "Iteration 67, loss = 0.83522765\n",
      "Iteration 68, loss = 0.82824392\n",
      "Iteration 69, loss = 0.82479610\n",
      "Iteration 70, loss = 0.82271664\n",
      "Iteration 71, loss = 0.81588358\n",
      "Iteration 72, loss = 0.81367259\n",
      "Iteration 73, loss = 0.80718105\n",
      "Iteration 74, loss = 0.80339972\n",
      "Iteration 75, loss = 0.80190586\n",
      "Iteration 76, loss = 0.79593660\n",
      "Iteration 77, loss = 0.79597274\n",
      "Iteration 78, loss = 0.80403419\n",
      "Iteration 79, loss = 0.78702095\n",
      "Iteration 80, loss = 0.78761662\n",
      "Iteration 81, loss = 0.78291632\n",
      "Iteration 82, loss = 0.77960690\n",
      "Iteration 83, loss = 0.77750856\n",
      "Iteration 84, loss = 0.76875883\n",
      "Iteration 85, loss = 0.77016609\n",
      "Iteration 86, loss = 0.75976050\n",
      "Iteration 87, loss = 0.75846928\n",
      "Iteration 88, loss = 0.76345588\n",
      "Iteration 89, loss = 0.75103295\n",
      "Iteration 90, loss = 0.74486996\n",
      "Iteration 91, loss = 0.74568717\n",
      "Iteration 92, loss = 0.74293761\n",
      "Iteration 93, loss = 0.73959043\n",
      "Iteration 94, loss = 0.73509668\n",
      "Iteration 95, loss = 0.73201940\n",
      "Iteration 96, loss = 0.72596475\n",
      "Iteration 97, loss = 0.73257595\n",
      "Iteration 98, loss = 0.75163840\n",
      "Iteration 99, loss = 0.77220251\n",
      "Iteration 100, loss = 0.72770514\n",
      "Iteration 101, loss = 0.72109741\n",
      "Iteration 102, loss = 0.72140720\n",
      "Iteration 103, loss = 0.71332324\n",
      "Iteration 104, loss = 0.70870309\n",
      "Iteration 105, loss = 0.71769794\n",
      "Iteration 106, loss = 0.71741935\n",
      "Iteration 107, loss = 0.70374804\n",
      "Iteration 108, loss = 0.69513822\n",
      "Iteration 109, loss = 0.69822673\n",
      "Iteration 110, loss = 0.71773354\n",
      "Iteration 111, loss = 0.69661325\n",
      "Iteration 112, loss = 0.69869786\n",
      "Iteration 113, loss = 0.69603222\n",
      "Iteration 114, loss = 0.68736883\n",
      "Iteration 115, loss = 0.68869767\n",
      "Iteration 116, loss = 0.68318151\n",
      "Iteration 117, loss = 0.67933783\n",
      "Iteration 118, loss = 0.67836160\n",
      "Iteration 119, loss = 0.67126421\n",
      "Iteration 120, loss = 0.67364944\n",
      "Iteration 121, loss = 0.67615035\n",
      "Iteration 122, loss = 0.67054729\n",
      "Iteration 123, loss = 0.66792190\n",
      "Iteration 124, loss = 0.67041211\n",
      "Iteration 125, loss = 0.66416389\n",
      "Iteration 126, loss = 0.66016925\n",
      "Iteration 127, loss = 0.65835543\n",
      "Iteration 128, loss = 0.66028150\n",
      "Iteration 129, loss = 0.65632721\n",
      "Iteration 130, loss = 0.65236284\n",
      "Iteration 131, loss = 0.65277514\n",
      "Iteration 132, loss = 0.65209780\n",
      "Iteration 133, loss = 0.65083926\n",
      "Iteration 134, loss = 0.64076606\n",
      "Iteration 135, loss = 0.64562993\n",
      "Iteration 136, loss = 0.64101830\n",
      "Iteration 137, loss = 0.63658816\n",
      "Iteration 138, loss = 0.64298040\n",
      "Iteration 139, loss = 0.64945573\n",
      "Iteration 140, loss = 0.63522067\n",
      "Iteration 141, loss = 0.62961230\n",
      "Iteration 142, loss = 0.63342746\n",
      "Iteration 143, loss = 0.62554611\n",
      "Iteration 144, loss = 0.63866336\n",
      "Iteration 145, loss = 0.62181667\n",
      "Iteration 146, loss = 0.62188617\n",
      "Iteration 147, loss = 0.62292317\n",
      "Iteration 148, loss = 0.62102147\n",
      "Iteration 149, loss = 0.61382841\n",
      "Iteration 150, loss = 0.62026782\n",
      "Iteration 151, loss = 0.61712958\n",
      "Iteration 152, loss = 0.60972292\n",
      "Iteration 153, loss = 0.61460287\n",
      "Iteration 154, loss = 0.60616410\n",
      "Iteration 155, loss = 0.60243900\n",
      "Iteration 156, loss = 0.60693867\n",
      "Iteration 157, loss = 0.60249536\n",
      "Iteration 158, loss = 0.60089882\n",
      "Iteration 159, loss = 0.61006989\n",
      "Iteration 160, loss = 0.60460668\n",
      "Iteration 161, loss = 0.59321588\n",
      "Iteration 162, loss = 0.59967104\n",
      "Iteration 163, loss = 0.58948224\n",
      "Iteration 164, loss = 0.59682007\n",
      "Iteration 165, loss = 0.59064227\n",
      "Iteration 166, loss = 0.59478353\n",
      "Iteration 167, loss = 0.58896722\n",
      "Iteration 168, loss = 0.58735638\n",
      "Iteration 169, loss = 0.58519902\n",
      "Iteration 170, loss = 0.58658574\n",
      "Iteration 171, loss = 0.57857478\n",
      "Iteration 172, loss = 0.58148353\n",
      "Iteration 173, loss = 0.57966563\n",
      "Iteration 174, loss = 0.57602819\n",
      "Iteration 175, loss = 0.57805113\n",
      "Iteration 176, loss = 0.57461964\n",
      "Iteration 177, loss = 0.57325683\n",
      "Iteration 178, loss = 0.56871095\n",
      "Iteration 179, loss = 0.56670256\n",
      "Iteration 180, loss = 0.56804581\n",
      "Iteration 181, loss = 0.56425161\n",
      "Iteration 182, loss = 0.56116877\n",
      "Iteration 183, loss = 0.56757120\n",
      "Iteration 184, loss = 0.56802959\n",
      "Iteration 185, loss = 0.55898894\n",
      "Iteration 186, loss = 0.56971428\n",
      "Iteration 187, loss = 0.56379404\n",
      "Iteration 188, loss = 0.60171116\n",
      "Iteration 189, loss = 0.55575054\n",
      "Iteration 190, loss = 0.55483168\n",
      "Iteration 191, loss = 0.55475620\n",
      "Iteration 192, loss = 0.54888413\n",
      "Iteration 193, loss = 0.55457035\n",
      "Iteration 194, loss = 0.56291778\n",
      "Iteration 195, loss = 0.54729634\n",
      "Iteration 196, loss = 0.54454602\n",
      "Iteration 197, loss = 0.54706198\n",
      "Iteration 198, loss = 0.54980158\n",
      "Iteration 199, loss = 0.54754380\n",
      "Iteration 200, loss = 0.53941162\n",
      "Iteration 201, loss = 0.53797945\n",
      "Iteration 202, loss = 0.54070259\n",
      "Iteration 203, loss = 0.54108465\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iteration 204, loss = 0.54222297\n",
      "Iteration 205, loss = 0.53707194\n",
      "Iteration 206, loss = 0.53602688\n",
      "Iteration 207, loss = 0.53965622\n",
      "Iteration 208, loss = 0.54255016\n",
      "Iteration 209, loss = 0.52683815\n",
      "Iteration 210, loss = 0.52952162\n",
      "Iteration 211, loss = 0.52870887\n",
      "Iteration 212, loss = 0.53513298\n",
      "Iteration 213, loss = 0.53194740\n",
      "Iteration 214, loss = 0.53115235\n",
      "Iteration 215, loss = 0.52418280\n",
      "Iteration 216, loss = 0.52562560\n",
      "Iteration 217, loss = 0.52206913\n",
      "Iteration 218, loss = 0.52191577\n",
      "Iteration 219, loss = 0.52175578\n",
      "Iteration 220, loss = 0.51584238\n",
      "Iteration 221, loss = 0.52214769\n",
      "Iteration 222, loss = 0.52602603\n",
      "Iteration 223, loss = 0.52786037\n",
      "Iteration 224, loss = 0.51182017\n",
      "Iteration 225, loss = 0.51755767\n",
      "Iteration 226, loss = 0.51949150\n",
      "Iteration 227, loss = 0.54963712\n",
      "Iteration 228, loss = 0.51561845\n",
      "Iteration 229, loss = 0.51507033\n",
      "Iteration 230, loss = 0.51339668\n",
      "Iteration 231, loss = 0.51396447\n",
      "Iteration 232, loss = 0.51219134\n",
      "Iteration 233, loss = 0.51248939\n",
      "Iteration 234, loss = 0.50544027\n",
      "Iteration 235, loss = 0.51123316\n",
      "Iteration 236, loss = 0.50671687\n",
      "Iteration 237, loss = 0.50714055\n",
      "Iteration 238, loss = 0.49856699\n",
      "Iteration 239, loss = 0.50415590\n",
      "Iteration 240, loss = 0.49842881\n",
      "Iteration 241, loss = 0.50103897\n",
      "Iteration 242, loss = 0.49664598\n",
      "Iteration 243, loss = 0.49791912\n",
      "Iteration 244, loss = 0.49928605\n",
      "Iteration 245, loss = 0.49429551\n",
      "Iteration 246, loss = 0.49654049\n",
      "Iteration 247, loss = 0.49625054\n",
      "Iteration 248, loss = 0.49241794\n",
      "Iteration 249, loss = 0.49375812\n",
      "Iteration 250, loss = 0.49489468\n",
      "Iteration 251, loss = 0.48924556\n",
      "Iteration 252, loss = 0.48724812\n",
      "Iteration 253, loss = 0.48826567\n",
      "Iteration 254, loss = 0.49590399\n",
      "Iteration 255, loss = 0.48832705\n",
      "Iteration 256, loss = 0.48612623\n",
      "Iteration 257, loss = 0.48688263\n",
      "Iteration 258, loss = 0.48251977\n",
      "Iteration 259, loss = 0.48653500\n",
      "Iteration 260, loss = 0.47842684\n",
      "Iteration 261, loss = 0.47795952\n",
      "Iteration 262, loss = 0.48056448\n",
      "Iteration 263, loss = 0.47902539\n",
      "Iteration 264, loss = 0.48162740\n",
      "Iteration 265, loss = 0.48308878\n",
      "Iteration 266, loss = 0.48170327\n",
      "Iteration 267, loss = 0.46859267\n",
      "Iteration 268, loss = 0.47746170\n",
      "Iteration 269, loss = 0.47981998\n",
      "Iteration 270, loss = 0.46959549\n",
      "Iteration 271, loss = 0.48363529\n",
      "Iteration 272, loss = 0.47390966\n",
      "Iteration 273, loss = 0.46702013\n",
      "Iteration 274, loss = 0.46931314\n",
      "Iteration 275, loss = 0.47230336\n",
      "Iteration 276, loss = 0.46973917\n",
      "Iteration 277, loss = 0.47191653\n",
      "Iteration 278, loss = 0.46721249\n",
      "Iteration 279, loss = 0.46767775\n",
      "Iteration 280, loss = 0.47538257\n",
      "Iteration 281, loss = 0.46934216\n",
      "Iteration 282, loss = 0.47109024\n",
      "Iteration 283, loss = 0.46049706\n",
      "Iteration 284, loss = 0.46553998\n",
      "Iteration 285, loss = 0.46245970\n",
      "Iteration 286, loss = 0.46654014\n",
      "Iteration 287, loss = 0.45539413\n",
      "Iteration 288, loss = 0.45889307\n",
      "Iteration 289, loss = 0.46259867\n",
      "Iteration 290, loss = 0.45831084\n",
      "Iteration 291, loss = 0.45883961\n",
      "Iteration 292, loss = 0.45440468\n",
      "Iteration 293, loss = 0.45819364\n",
      "Iteration 294, loss = 0.45675667\n",
      "Iteration 295, loss = 0.45855344\n",
      "Iteration 296, loss = 0.45572248\n",
      "Iteration 297, loss = 0.45154439\n",
      "Iteration 298, loss = 0.45689481\n",
      "Iteration 299, loss = 0.45050952\n",
      "Iteration 300, loss = 0.45260831\n",
      "Iteration 301, loss = 0.45091084\n",
      "Iteration 302, loss = 0.45236969\n",
      "Iteration 303, loss = 0.45384859\n",
      "Iteration 304, loss = 0.44604969\n",
      "Iteration 305, loss = 0.44643973\n",
      "Iteration 306, loss = 0.44708856\n",
      "Iteration 307, loss = 0.44600165\n",
      "Iteration 308, loss = 0.45021996\n",
      "Iteration 309, loss = 0.45252284\n",
      "Iteration 310, loss = 0.44490314\n",
      "Iteration 311, loss = 0.44370996\n",
      "Iteration 312, loss = 0.45104801\n",
      "Iteration 313, loss = 0.44486526\n",
      "Iteration 314, loss = 0.44569291\n",
      "Iteration 315, loss = 0.44646018\n",
      "Iteration 316, loss = 0.44341725\n",
      "Iteration 317, loss = 0.43926880\n",
      "Iteration 318, loss = 0.43667826\n",
      "Iteration 319, loss = 0.44235963\n",
      "Iteration 320, loss = 0.44205266\n",
      "Iteration 321, loss = 0.44357132\n",
      "Iteration 322, loss = 0.43917231\n",
      "Iteration 323, loss = 0.44316184\n",
      "Iteration 324, loss = 0.43473826\n",
      "Iteration 325, loss = 0.44071544\n",
      "Iteration 326, loss = 0.43774767\n",
      "Iteration 327, loss = 0.43570505\n",
      "Iteration 328, loss = 0.43034156\n",
      "Iteration 329, loss = 0.43775032\n",
      "Iteration 330, loss = 0.43769731\n",
      "Iteration 331, loss = 0.43717672\n",
      "Iteration 332, loss = 0.43319910\n",
      "Iteration 333, loss = 0.42872271\n",
      "Iteration 334, loss = 0.43826526\n",
      "Iteration 335, loss = 0.43147654\n",
      "Iteration 336, loss = 0.43378768\n",
      "Iteration 337, loss = 0.43065044\n",
      "Iteration 338, loss = 0.43676730\n",
      "Iteration 339, loss = 0.42968208\n",
      "Iteration 340, loss = 0.42912474\n",
      "Iteration 341, loss = 0.43266443\n",
      "Iteration 342, loss = 0.43452474\n",
      "Iteration 343, loss = 0.43321480\n",
      "Iteration 344, loss = 0.42239027\n",
      "Iteration 345, loss = 0.42269936\n",
      "Iteration 346, loss = 0.42750788\n",
      "Iteration 347, loss = 0.42493354\n",
      "Iteration 348, loss = 0.42229563\n",
      "Iteration 349, loss = 0.42971781\n",
      "Iteration 350, loss = 0.42543734\n",
      "Iteration 351, loss = 0.43635351\n",
      "Iteration 352, loss = 0.42604475\n",
      "Iteration 353, loss = 0.41925710\n",
      "Iteration 354, loss = 0.42567874\n",
      "Iteration 355, loss = 0.41824528\n",
      "Iteration 356, loss = 0.42427479\n",
      "Iteration 357, loss = 0.41660923\n",
      "Iteration 358, loss = 0.42523699\n",
      "Iteration 359, loss = 0.42545610\n",
      "Iteration 360, loss = 0.41774003\n",
      "Iteration 361, loss = 0.41867118\n",
      "Iteration 362, loss = 0.42017112\n",
      "Iteration 363, loss = 0.42315012\n",
      "Iteration 364, loss = 0.41426192\n",
      "Iteration 365, loss = 0.42303887\n",
      "Iteration 366, loss = 0.41305305\n",
      "Iteration 367, loss = 0.41347576\n",
      "Iteration 368, loss = 0.41771323\n",
      "Iteration 369, loss = 0.41579838\n",
      "Iteration 370, loss = 0.41844178\n",
      "Iteration 371, loss = 0.41110930\n",
      "Iteration 372, loss = 0.41287658\n",
      "Iteration 373, loss = 0.41470120\n",
      "Iteration 374, loss = 0.41611002\n",
      "Iteration 375, loss = 0.40776181\n",
      "Iteration 376, loss = 0.41264737\n",
      "Iteration 377, loss = 0.40845328\n",
      "Iteration 378, loss = 0.41655383\n",
      "Iteration 379, loss = 0.41459573\n",
      "Iteration 380, loss = 0.41286389\n",
      "Iteration 381, loss = 0.40710321\n",
      "Iteration 382, loss = 0.41884643\n",
      "Iteration 383, loss = 0.41416293\n",
      "Iteration 384, loss = 0.42274423\n",
      "Iteration 385, loss = 0.40995967\n",
      "Iteration 386, loss = 0.41706702\n",
      "Iteration 387, loss = 0.42637491\n",
      "Iteration 388, loss = 0.41039781\n",
      "Iteration 389, loss = 0.40707800\n",
      "Iteration 390, loss = 0.40731497\n",
      "Iteration 391, loss = 0.40434710\n",
      "Iteration 392, loss = 0.40548310\n",
      "Iteration 393, loss = 0.40569838\n",
      "Iteration 394, loss = 0.40405142\n",
      "Iteration 395, loss = 0.40407750\n",
      "Iteration 396, loss = 0.40049734\n",
      "Iteration 397, loss = 0.41037856\n",
      "Iteration 398, loss = 0.39754870\n",
      "Iteration 399, loss = 0.41079701\n",
      "Iteration 400, loss = 0.39912590\n",
      "Iteration 401, loss = 0.40746869\n",
      "Iteration 402, loss = 0.40383763\n",
      "Iteration 403, loss = 0.40153444\n",
      "Iteration 404, loss = 0.39906948\n",
      "Iteration 405, loss = 0.40379016\n",
      "Iteration 406, loss = 0.40256501\n",
      "Iteration 407, loss = 0.40044184\n",
      "Iteration 408, loss = 0.40816367\n",
      "Iteration 409, loss = 0.39610694\n",
      "Iteration 410, loss = 0.39634751\n",
      "Iteration 411, loss = 0.39913007\n",
      "Iteration 412, loss = 0.39787938\n",
      "Iteration 413, loss = 0.39457055\n",
      "Iteration 414, loss = 0.39228311\n",
      "Iteration 415, loss = 0.41862890\n",
      "Iteration 416, loss = 0.40069485\n",
      "Iteration 417, loss = 0.40408235\n",
      "Iteration 418, loss = 0.40168002\n",
      "Iteration 419, loss = 0.39967984\n",
      "Iteration 420, loss = 0.39970076\n",
      "Iteration 421, loss = 0.40839795\n",
      "Iteration 422, loss = 0.40021307\n",
      "Iteration 423, loss = 0.39477518\n",
      "Iteration 424, loss = 0.39715401\n",
      "Iteration 425, loss = 0.39234154\n",
      "Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
      "1.9150046789965516\n",
      "Iteration 1, loss = 16.61423756\n",
      "Iteration 2, loss = 3.32235115\n",
      "Iteration 3, loss = 2.29579306\n",
      "Iteration 4, loss = 1.92626589\n",
      "Iteration 5, loss = 1.75593769\n",
      "Iteration 6, loss = 1.66104294\n",
      "Iteration 7, loss = 1.61203665\n",
      "Iteration 8, loss = 1.57121778\n",
      "Iteration 9, loss = 1.54287166\n",
      "Iteration 10, loss = 1.51583784\n",
      "Iteration 11, loss = 1.49876493\n",
      "Iteration 12, loss = 1.47229112\n",
      "Iteration 13, loss = 1.45559655\n",
      "Iteration 14, loss = 1.44455768\n",
      "Iteration 15, loss = 1.42407020\n",
      "Iteration 16, loss = 1.40271665\n",
      "Iteration 17, loss = 1.38473209\n",
      "Iteration 18, loss = 1.36471323\n",
      "Iteration 19, loss = 1.34438899\n",
      "Iteration 20, loss = 1.33266378\n",
      "Iteration 21, loss = 1.30777541\n",
      "Iteration 22, loss = 1.29159209\n",
      "Iteration 23, loss = 1.27173055\n",
      "Iteration 24, loss = 1.25613716\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iteration 25, loss = 1.23494251\n",
      "Iteration 26, loss = 1.21422641\n",
      "Iteration 27, loss = 1.19512143\n",
      "Iteration 28, loss = 1.17195363\n",
      "Iteration 29, loss = 1.15514109\n",
      "Iteration 30, loss = 1.14025424\n",
      "Iteration 31, loss = 1.12016197\n",
      "Iteration 32, loss = 1.10163686\n",
      "Iteration 33, loss = 1.08736874\n",
      "Iteration 34, loss = 1.07108158\n",
      "Iteration 35, loss = 1.05460327\n",
      "Iteration 36, loss = 1.04198855\n",
      "Iteration 37, loss = 1.02671159\n",
      "Iteration 38, loss = 1.01724409\n",
      "Iteration 39, loss = 0.99931052\n",
      "Iteration 40, loss = 0.98956401\n",
      "Iteration 41, loss = 0.97810460\n",
      "Iteration 42, loss = 0.96169039\n",
      "Iteration 43, loss = 0.95378900\n",
      "Iteration 44, loss = 0.94173352\n",
      "Iteration 45, loss = 0.93330894\n",
      "Iteration 46, loss = 0.93177060\n",
      "Iteration 47, loss = 0.92126531\n",
      "Iteration 48, loss = 0.91960089\n",
      "Iteration 49, loss = 0.90669489\n",
      "Iteration 50, loss = 0.89859262\n",
      "Iteration 51, loss = 0.89777455\n",
      "Iteration 52, loss = 0.88227651\n",
      "Iteration 53, loss = 0.88099685\n",
      "Iteration 54, loss = 0.87538468\n",
      "Iteration 55, loss = 0.86692904\n",
      "Iteration 56, loss = 0.86523603\n",
      "Iteration 57, loss = 0.85649914\n",
      "Iteration 58, loss = 0.85457723\n",
      "Iteration 59, loss = 0.85195074\n",
      "Iteration 60, loss = 0.85277729\n",
      "Iteration 61, loss = 0.84194444\n",
      "Iteration 62, loss = 0.83534638\n",
      "Iteration 63, loss = 0.83482558\n",
      "Iteration 64, loss = 0.82840435\n",
      "Iteration 65, loss = 0.82821516\n",
      "Iteration 66, loss = 0.81628133\n",
      "Iteration 67, loss = 0.81541631\n",
      "Iteration 68, loss = 0.81230334\n",
      "Iteration 69, loss = 0.80375075\n",
      "Iteration 70, loss = 0.80916836\n",
      "Iteration 71, loss = 0.79491030\n",
      "Iteration 72, loss = 0.79612157\n",
      "Iteration 73, loss = 0.79095001\n",
      "Iteration 74, loss = 0.79053442\n",
      "Iteration 75, loss = 0.78636224\n",
      "Iteration 76, loss = 0.77734431\n",
      "Iteration 77, loss = 0.77827090\n",
      "Iteration 78, loss = 0.77501129\n",
      "Iteration 79, loss = 0.76764397\n",
      "Iteration 80, loss = 0.77147709\n",
      "Iteration 81, loss = 0.76818639\n",
      "Iteration 82, loss = 0.76190783\n",
      "Iteration 83, loss = 0.76405455\n",
      "Iteration 84, loss = 0.75539575\n",
      "Iteration 85, loss = 0.74776907\n",
      "Iteration 86, loss = 0.74425642\n",
      "Iteration 87, loss = 0.74726309\n",
      "Iteration 88, loss = 0.74776706\n",
      "Iteration 89, loss = 0.73792345\n",
      "Iteration 90, loss = 0.72968362\n",
      "Iteration 91, loss = 0.73046320\n",
      "Iteration 92, loss = 0.73135732\n",
      "Iteration 93, loss = 0.72805872\n",
      "Iteration 94, loss = 0.71938668\n",
      "Iteration 95, loss = 0.72147186\n",
      "Iteration 96, loss = 0.71545658\n",
      "Iteration 97, loss = 0.71695865\n",
      "Iteration 98, loss = 0.74094572\n",
      "Iteration 99, loss = 0.75395685\n",
      "Iteration 100, loss = 0.71533554\n",
      "Iteration 101, loss = 0.70760185\n",
      "Iteration 102, loss = 0.70169375\n",
      "Iteration 103, loss = 0.70112667\n",
      "Iteration 104, loss = 0.69799116\n",
      "Iteration 105, loss = 0.69263095\n",
      "Iteration 106, loss = 0.69840149\n",
      "Iteration 107, loss = 0.69734571\n",
      "Iteration 108, loss = 0.68328581\n",
      "Iteration 109, loss = 0.68204572\n",
      "Iteration 110, loss = 0.69092726\n",
      "Iteration 111, loss = 0.68197037\n",
      "Iteration 112, loss = 0.68373467\n",
      "Iteration 113, loss = 0.67803151\n",
      "Iteration 114, loss = 0.67779752\n",
      "Iteration 115, loss = 0.67959697\n",
      "Iteration 116, loss = 0.66650757\n",
      "Iteration 117, loss = 0.67196517\n",
      "Iteration 118, loss = 0.66445562\n",
      "Iteration 119, loss = 0.66180256\n",
      "Iteration 120, loss = 0.66264422\n",
      "Iteration 121, loss = 0.66387838\n",
      "Iteration 122, loss = 0.65458437\n",
      "Iteration 123, loss = 0.65477948\n",
      "Iteration 124, loss = 0.66035505\n",
      "Iteration 125, loss = 0.64957474\n",
      "Iteration 126, loss = 0.64753943\n",
      "Iteration 127, loss = 0.64597239\n",
      "Iteration 128, loss = 0.64863079\n",
      "Iteration 129, loss = 0.64151818\n",
      "Iteration 130, loss = 0.63684860\n",
      "Iteration 131, loss = 0.63819587\n",
      "Iteration 132, loss = 0.63898523\n",
      "Iteration 133, loss = 0.63722058\n",
      "Iteration 134, loss = 0.63088404\n",
      "Iteration 135, loss = 0.63863491\n",
      "Iteration 136, loss = 0.63042263\n",
      "Iteration 137, loss = 0.62598459\n",
      "Iteration 138, loss = 0.62730420\n",
      "Iteration 139, loss = 0.62777954\n",
      "Iteration 140, loss = 0.62105061\n",
      "Iteration 141, loss = 0.62052648\n",
      "Iteration 142, loss = 0.62017900\n",
      "Iteration 143, loss = 0.62634037\n",
      "Iteration 144, loss = 0.62460546\n",
      "Iteration 145, loss = 0.61644292\n",
      "Iteration 146, loss = 0.61418646\n",
      "Iteration 147, loss = 0.61967799\n",
      "Iteration 148, loss = 0.61462002\n",
      "Iteration 149, loss = 0.60455612\n",
      "Iteration 150, loss = 0.61315951\n",
      "Iteration 151, loss = 0.60773721\n",
      "Iteration 152, loss = 0.60320516\n",
      "Iteration 153, loss = 0.60632284\n",
      "Iteration 154, loss = 0.59590546\n",
      "Iteration 155, loss = 0.59537063\n",
      "Iteration 156, loss = 0.60196958\n",
      "Iteration 157, loss = 0.60024692\n",
      "Iteration 158, loss = 0.59315764\n",
      "Iteration 159, loss = 0.61039468\n",
      "Iteration 160, loss = 0.60164102\n",
      "Iteration 161, loss = 0.59008951\n",
      "Iteration 162, loss = 0.59727196\n",
      "Iteration 163, loss = 0.58644035\n",
      "Iteration 164, loss = 0.59048086\n",
      "Iteration 165, loss = 0.59018020\n",
      "Iteration 166, loss = 0.58588621\n",
      "Iteration 167, loss = 0.58770853\n",
      "Iteration 168, loss = 0.58159616\n",
      "Iteration 169, loss = 0.58170391\n",
      "Iteration 170, loss = 0.58088786\n",
      "Iteration 171, loss = 0.57639617\n",
      "Iteration 172, loss = 0.58096145\n",
      "Iteration 173, loss = 0.56924660\n",
      "Iteration 174, loss = 0.57195790\n",
      "Iteration 175, loss = 0.57428294\n",
      "Iteration 176, loss = 0.56604559\n",
      "Iteration 177, loss = 0.56411114\n",
      "Iteration 178, loss = 0.57145663\n",
      "Iteration 179, loss = 0.56681909\n",
      "Iteration 180, loss = 0.56638087\n",
      "Iteration 181, loss = 0.56426409\n",
      "Iteration 182, loss = 0.55785222\n",
      "Iteration 183, loss = 0.55776632\n",
      "Iteration 184, loss = 0.56351367\n",
      "Iteration 185, loss = 0.55691700\n",
      "Iteration 186, loss = 0.56931614\n",
      "Iteration 187, loss = 0.55557520\n",
      "Iteration 188, loss = 0.60796822\n",
      "Iteration 189, loss = 0.55636522\n",
      "Iteration 190, loss = 0.55423839\n",
      "Iteration 191, loss = 0.55442916\n",
      "Iteration 192, loss = 0.54816329\n",
      "Iteration 193, loss = 0.55147595\n",
      "Iteration 194, loss = 0.55451458\n",
      "Iteration 195, loss = 0.55053364\n",
      "Iteration 196, loss = 0.54574373\n",
      "Iteration 197, loss = 0.54424576\n",
      "Iteration 198, loss = 0.54447401\n",
      "Iteration 199, loss = 0.54247884\n",
      "Iteration 200, loss = 0.53396790\n",
      "Iteration 201, loss = 0.53755021\n",
      "Iteration 202, loss = 0.54577417\n",
      "Iteration 203, loss = 0.54291451\n",
      "Iteration 204, loss = 0.54147182\n",
      "Iteration 205, loss = 0.53782513\n",
      "Iteration 206, loss = 0.53727504\n",
      "Iteration 207, loss = 0.54083900\n",
      "Iteration 208, loss = 0.53944656\n",
      "Iteration 209, loss = 0.52404996\n",
      "Iteration 210, loss = 0.52849105\n",
      "Iteration 211, loss = 0.52900058\n",
      "Iteration 212, loss = 0.52855840\n",
      "Iteration 213, loss = 0.53201027\n",
      "Iteration 214, loss = 0.53021040\n",
      "Iteration 215, loss = 0.52169218\n",
      "Iteration 216, loss = 0.52316213\n",
      "Iteration 217, loss = 0.51965209\n",
      "Iteration 218, loss = 0.51891159\n",
      "Iteration 219, loss = 0.52231827\n",
      "Iteration 220, loss = 0.51511529\n",
      "Iteration 221, loss = 0.52816350\n",
      "Iteration 222, loss = 0.51878233\n",
      "Iteration 223, loss = 0.52572725\n",
      "Iteration 224, loss = 0.50994451\n",
      "Iteration 225, loss = 0.51055444\n",
      "Iteration 226, loss = 0.51990048\n",
      "Iteration 227, loss = 0.54346275\n",
      "Iteration 228, loss = 0.51035955\n",
      "Iteration 229, loss = 0.51301183\n",
      "Iteration 230, loss = 0.51364712\n",
      "Iteration 231, loss = 0.50793040\n",
      "Iteration 232, loss = 0.51102214\n",
      "Iteration 233, loss = 0.51088107\n",
      "Iteration 234, loss = 0.50203419\n",
      "Iteration 235, loss = 0.50643664\n",
      "Iteration 236, loss = 0.50239155\n",
      "Iteration 237, loss = 0.50290921\n",
      "Iteration 238, loss = 0.49930666\n",
      "Iteration 239, loss = 0.49930063\n",
      "Iteration 240, loss = 0.49569701\n",
      "Iteration 241, loss = 0.49573295\n",
      "Iteration 242, loss = 0.50015140\n",
      "Iteration 243, loss = 0.49604843\n",
      "Iteration 244, loss = 0.49645705\n",
      "Iteration 245, loss = 0.49201813\n",
      "Iteration 246, loss = 0.49005921\n",
      "Iteration 247, loss = 0.49696016\n",
      "Iteration 248, loss = 0.49017887\n",
      "Iteration 249, loss = 0.49373487\n",
      "Iteration 250, loss = 0.49242558\n",
      "Iteration 251, loss = 0.48479146\n",
      "Iteration 252, loss = 0.48267489\n",
      "Iteration 253, loss = 0.48677434\n",
      "Iteration 254, loss = 0.48884662\n",
      "Iteration 255, loss = 0.48641657\n",
      "Iteration 256, loss = 0.48549360\n",
      "Iteration 257, loss = 0.48106740\n",
      "Iteration 258, loss = 0.48240563\n",
      "Iteration 259, loss = 0.48248597\n",
      "Iteration 260, loss = 0.47687634\n",
      "Iteration 261, loss = 0.47867204\n",
      "Iteration 262, loss = 0.47826399\n",
      "Iteration 263, loss = 0.47846918\n",
      "Iteration 264, loss = 0.48128722\n",
      "Iteration 265, loss = 0.47352955\n",
      "Iteration 266, loss = 0.47638664\n",
      "Iteration 267, loss = 0.46694477\n",
      "Iteration 268, loss = 0.47480845\n",
      "Iteration 269, loss = 0.47627478\n",
      "Iteration 270, loss = 0.46918921\n",
      "Iteration 271, loss = 0.48088926\n",
      "Iteration 272, loss = 0.46833715\n",
      "Iteration 273, loss = 0.46572913\n",
      "Iteration 274, loss = 0.46616180\n",
      "Iteration 275, loss = 0.46025276\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iteration 276, loss = 0.46668577\n",
      "Iteration 277, loss = 0.46896180\n",
      "Iteration 278, loss = 0.46447151\n",
      "Iteration 279, loss = 0.46485765\n",
      "Iteration 280, loss = 0.46565762\n",
      "Iteration 281, loss = 0.46579540\n",
      "Iteration 282, loss = 0.46715092\n",
      "Iteration 283, loss = 0.46862624\n",
      "Iteration 284, loss = 0.46637137\n",
      "Iteration 285, loss = 0.45869146\n",
      "Iteration 286, loss = 0.47113981\n",
      "Iteration 287, loss = 0.45509227\n",
      "Iteration 288, loss = 0.45356574\n",
      "Iteration 289, loss = 0.46052561\n",
      "Iteration 290, loss = 0.45300129\n",
      "Iteration 291, loss = 0.45034165\n",
      "Iteration 292, loss = 0.45224657\n",
      "Iteration 293, loss = 0.45611122\n",
      "Iteration 294, loss = 0.45189202\n",
      "Iteration 295, loss = 0.45289008\n",
      "Iteration 296, loss = 0.45109400\n",
      "Iteration 297, loss = 0.44950861\n",
      "Iteration 298, loss = 0.44858743\n",
      "Iteration 299, loss = 0.44607211\n",
      "Iteration 300, loss = 0.44991051\n",
      "Iteration 301, loss = 0.45115302\n",
      "Iteration 302, loss = 0.44878281\n",
      "Iteration 303, loss = 0.44747385\n",
      "Iteration 304, loss = 0.44494731\n",
      "Iteration 305, loss = 0.44528030\n",
      "Iteration 306, loss = 0.44362226\n",
      "Iteration 307, loss = 0.44425154\n",
      "Iteration 308, loss = 0.44870000\n",
      "Iteration 309, loss = 0.44550290\n",
      "Iteration 310, loss = 0.43846245\n",
      "Iteration 311, loss = 0.43895238\n",
      "Iteration 312, loss = 0.44454485\n",
      "Iteration 313, loss = 0.44718846\n",
      "Iteration 314, loss = 0.44735605\n",
      "Iteration 315, loss = 0.44101795\n",
      "Iteration 316, loss = 0.43669569\n",
      "Iteration 317, loss = 0.43491622\n",
      "Iteration 318, loss = 0.43356842\n",
      "Iteration 319, loss = 0.43722266\n",
      "Iteration 320, loss = 0.43687382\n",
      "Iteration 321, loss = 0.43841328\n",
      "Iteration 322, loss = 0.43351095\n",
      "Iteration 323, loss = 0.44052058\n",
      "Iteration 324, loss = 0.43445615\n",
      "Iteration 325, loss = 0.43418793\n",
      "Iteration 326, loss = 0.43712803\n",
      "Iteration 327, loss = 0.44100945\n",
      "Iteration 328, loss = 0.42604662\n",
      "Iteration 329, loss = 0.43548048\n",
      "Iteration 330, loss = 0.43348029\n",
      "Iteration 331, loss = 0.42781747\n",
      "Iteration 332, loss = 0.42544635\n",
      "Iteration 333, loss = 0.42487270\n",
      "Iteration 334, loss = 0.43855119\n",
      "Iteration 335, loss = 0.42850469\n",
      "Iteration 336, loss = 0.42808624\n",
      "Iteration 337, loss = 0.42824291\n",
      "Iteration 338, loss = 0.43436101\n",
      "Iteration 339, loss = 0.42961113\n",
      "Iteration 340, loss = 0.42453718\n",
      "Iteration 341, loss = 0.43239469\n",
      "Iteration 342, loss = 0.43002216\n",
      "Iteration 343, loss = 0.43051491\n",
      "Iteration 344, loss = 0.42236016\n",
      "Iteration 345, loss = 0.42391313\n",
      "Iteration 346, loss = 0.42976927\n",
      "Iteration 347, loss = 0.41803534\n",
      "Iteration 348, loss = 0.41784702\n",
      "Iteration 349, loss = 0.42237531\n",
      "Iteration 350, loss = 0.42292251\n",
      "Iteration 351, loss = 0.43153479\n",
      "Iteration 352, loss = 0.42095559\n",
      "Iteration 353, loss = 0.41426329\n",
      "Iteration 354, loss = 0.42440413\n",
      "Iteration 355, loss = 0.41419453\n",
      "Iteration 356, loss = 0.42214832\n",
      "Iteration 357, loss = 0.41802527\n",
      "Iteration 358, loss = 0.42158015\n",
      "Iteration 359, loss = 0.41580402\n",
      "Iteration 360, loss = 0.41115298\n",
      "Iteration 361, loss = 0.41633348\n",
      "Iteration 362, loss = 0.41586664\n",
      "Iteration 363, loss = 0.41502329\n",
      "Iteration 364, loss = 0.41130782\n",
      "Iteration 365, loss = 0.42038519\n",
      "Iteration 366, loss = 0.40833420\n",
      "Iteration 367, loss = 0.40824756\n",
      "Iteration 368, loss = 0.40988267\n",
      "Iteration 369, loss = 0.41626880\n",
      "Iteration 370, loss = 0.41972710\n",
      "Iteration 371, loss = 0.40978297\n",
      "Iteration 372, loss = 0.41207976\n",
      "Iteration 373, loss = 0.40629014\n",
      "Iteration 374, loss = 0.40910785\n",
      "Iteration 375, loss = 0.40856706\n",
      "Iteration 376, loss = 0.41014523\n",
      "Iteration 377, loss = 0.40494308\n",
      "Iteration 378, loss = 0.41521588\n",
      "Iteration 379, loss = 0.41008693\n",
      "Iteration 380, loss = 0.40605864\n",
      "Iteration 381, loss = 0.40162075\n",
      "Iteration 382, loss = 0.41708595\n",
      "Iteration 383, loss = 0.41681959\n",
      "Iteration 384, loss = 0.42206933\n",
      "Iteration 385, loss = 0.41156166\n",
      "Iteration 386, loss = 0.40999793\n",
      "Iteration 387, loss = 0.41106531\n",
      "Iteration 388, loss = 0.40543477\n",
      "Iteration 389, loss = 0.39938424\n",
      "Iteration 390, loss = 0.40139660\n",
      "Iteration 391, loss = 0.39862596\n",
      "Iteration 392, loss = 0.40341589\n",
      "Iteration 393, loss = 0.39965915\n",
      "Iteration 394, loss = 0.39911800\n",
      "Iteration 395, loss = 0.39843572\n",
      "Iteration 396, loss = 0.39781683\n",
      "Iteration 397, loss = 0.40412660\n",
      "Iteration 398, loss = 0.39501775\n",
      "Iteration 399, loss = 0.41068635\n",
      "Iteration 400, loss = 0.39828238\n",
      "Iteration 401, loss = 0.40150751\n",
      "Iteration 402, loss = 0.40047506\n",
      "Iteration 403, loss = 0.39992087\n",
      "Iteration 404, loss = 0.39438978\n",
      "Iteration 405, loss = 0.40020719\n",
      "Iteration 406, loss = 0.39536460\n",
      "Iteration 407, loss = 0.39642955\n",
      "Iteration 408, loss = 0.40026511\n",
      "Iteration 409, loss = 0.39763635\n",
      "Iteration 410, loss = 0.39173764\n",
      "Iteration 411, loss = 0.39421606\n",
      "Iteration 412, loss = 0.39200118\n",
      "Iteration 413, loss = 0.39179734\n",
      "Iteration 414, loss = 0.38941662\n",
      "Iteration 415, loss = 0.39582564\n",
      "Iteration 416, loss = 0.39830242\n",
      "Iteration 417, loss = 0.39950489\n",
      "Iteration 418, loss = 0.39284434\n",
      "Iteration 419, loss = 0.39745247\n",
      "Iteration 420, loss = 0.39825572\n",
      "Iteration 421, loss = 0.40275976\n",
      "Iteration 422, loss = 0.39921128\n",
      "Iteration 423, loss = 0.38872145\n",
      "Iteration 424, loss = 0.39427858\n",
      "Iteration 425, loss = 0.39616435\n",
      "Iteration 426, loss = 0.38992603\n",
      "Iteration 427, loss = 0.38713933\n",
      "Iteration 428, loss = 0.38520611\n",
      "Iteration 429, loss = 0.39048518\n",
      "Iteration 430, loss = 0.39381026\n",
      "Iteration 431, loss = 0.38517448\n",
      "Iteration 432, loss = 0.38951916\n",
      "Iteration 433, loss = 0.38419272\n",
      "Iteration 434, loss = 0.38590073\n",
      "Iteration 435, loss = 0.38580305\n",
      "Iteration 436, loss = 0.38362108\n",
      "Iteration 437, loss = 0.38360285\n",
      "Iteration 438, loss = 0.38978484\n",
      "Iteration 439, loss = 0.39015837\n",
      "Iteration 440, loss = 0.38909263\n",
      "Iteration 441, loss = 0.37900576\n",
      "Iteration 442, loss = 0.38607394\n",
      "Iteration 443, loss = 0.38232155\n",
      "Iteration 444, loss = 0.37911932\n",
      "Iteration 445, loss = 0.38502927\n",
      "Iteration 446, loss = 0.38609129\n",
      "Iteration 447, loss = 0.38563577\n",
      "Iteration 448, loss = 0.38665615\n",
      "Iteration 449, loss = 0.38063779\n",
      "Iteration 450, loss = 0.37989135\n",
      "Iteration 451, loss = 0.38782543\n",
      "Iteration 452, loss = 0.38292758\n",
      "Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
      "2.1711630534534394\n",
      "Iteration 1, loss = 16.74487978\n",
      "Iteration 2, loss = 3.41289032\n",
      "Iteration 3, loss = 2.36609146\n",
      "Iteration 4, loss = 1.98883664\n",
      "Iteration 5, loss = 1.81191288\n",
      "Iteration 6, loss = 1.71214685\n",
      "Iteration 7, loss = 1.65693482\n",
      "Iteration 8, loss = 1.61233537\n",
      "Iteration 9, loss = 1.58352536\n",
      "Iteration 10, loss = 1.55667248\n",
      "Iteration 11, loss = 1.53558245\n",
      "Iteration 12, loss = 1.51115786\n",
      "Iteration 13, loss = 1.49337440\n",
      "Iteration 14, loss = 1.48124602\n",
      "Iteration 15, loss = 1.46168699\n",
      "Iteration 16, loss = 1.43952750\n",
      "Iteration 17, loss = 1.41596858\n",
      "Iteration 18, loss = 1.39642504\n",
      "Iteration 19, loss = 1.37971432\n",
      "Iteration 20, loss = 1.36710923\n",
      "Iteration 21, loss = 1.33990956\n",
      "Iteration 22, loss = 1.32256017\n",
      "Iteration 23, loss = 1.30323454\n",
      "Iteration 24, loss = 1.28591234\n",
      "Iteration 25, loss = 1.26637898\n",
      "Iteration 26, loss = 1.24612163\n",
      "Iteration 27, loss = 1.22608834\n",
      "Iteration 28, loss = 1.19972417\n",
      "Iteration 29, loss = 1.18104150\n",
      "Iteration 30, loss = 1.16650245\n",
      "Iteration 31, loss = 1.14533378\n",
      "Iteration 32, loss = 1.12594566\n",
      "Iteration 33, loss = 1.11054564\n",
      "Iteration 34, loss = 1.09416457\n",
      "Iteration 35, loss = 1.07790066\n",
      "Iteration 36, loss = 1.06756299\n",
      "Iteration 37, loss = 1.04642236\n",
      "Iteration 38, loss = 1.03142534\n",
      "Iteration 39, loss = 1.01739164\n",
      "Iteration 40, loss = 1.00373699\n",
      "Iteration 41, loss = 0.99794024\n",
      "Iteration 42, loss = 0.98041935\n",
      "Iteration 43, loss = 0.96994794\n",
      "Iteration 44, loss = 0.95976300\n",
      "Iteration 45, loss = 0.95456008\n",
      "Iteration 46, loss = 0.94824782\n",
      "Iteration 47, loss = 0.93974231\n",
      "Iteration 48, loss = 0.93358044\n",
      "Iteration 49, loss = 0.92833294\n",
      "Iteration 50, loss = 0.91963430\n",
      "Iteration 51, loss = 0.91881016\n",
      "Iteration 52, loss = 0.89914397\n",
      "Iteration 53, loss = 0.89688223\n",
      "Iteration 54, loss = 0.88864127\n",
      "Iteration 55, loss = 0.88257276\n",
      "Iteration 56, loss = 0.87595590\n",
      "Iteration 57, loss = 0.86975426\n",
      "Iteration 58, loss = 0.86655850\n",
      "Iteration 59, loss = 0.86442666\n",
      "Iteration 60, loss = 0.86307831\n",
      "Iteration 61, loss = 0.85262243\n",
      "Iteration 62, loss = 0.84771436\n",
      "Iteration 63, loss = 0.84179718\n",
      "Iteration 64, loss = 0.83813930\n",
      "Iteration 65, loss = 0.83180446\n",
      "Iteration 66, loss = 0.83250712\n",
      "Iteration 67, loss = 0.82439015\n",
      "Iteration 68, loss = 0.82546464\n",
      "Iteration 69, loss = 0.81286016\n",
      "Iteration 70, loss = 0.82015322\n",
      "Iteration 71, loss = 0.80432639\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iteration 72, loss = 0.80097907\n",
      "Iteration 73, loss = 0.79859064\n",
      "Iteration 74, loss = 0.79023905\n",
      "Iteration 75, loss = 0.80528964\n",
      "Iteration 76, loss = 0.78639569\n",
      "Iteration 77, loss = 0.78146744\n",
      "Iteration 78, loss = 0.77847807\n",
      "Iteration 79, loss = 0.77170158\n",
      "Iteration 80, loss = 0.77966069\n",
      "Iteration 81, loss = 0.76954003\n",
      "Iteration 82, loss = 0.76104786\n",
      "Iteration 83, loss = 0.76131015\n",
      "Iteration 84, loss = 0.76040653\n",
      "Iteration 85, loss = 0.75689581\n",
      "Iteration 86, loss = 0.75329922\n",
      "Iteration 87, loss = 0.75551075\n",
      "Iteration 88, loss = 0.75122803\n",
      "Iteration 89, loss = 0.74591191\n",
      "Iteration 90, loss = 0.73683854\n",
      "Iteration 91, loss = 0.73477943\n",
      "Iteration 92, loss = 0.73345580\n",
      "Iteration 93, loss = 0.73882936\n",
      "Iteration 94, loss = 0.72541305\n",
      "Iteration 95, loss = 0.72159544\n",
      "Iteration 96, loss = 0.71756391\n",
      "Iteration 97, loss = 0.71657771\n",
      "Iteration 98, loss = 0.72893399\n",
      "Iteration 99, loss = 0.72977868\n",
      "Iteration 100, loss = 0.72023196\n",
      "Iteration 101, loss = 0.71087683\n",
      "Iteration 102, loss = 0.70448856\n",
      "Iteration 103, loss = 0.70204146\n",
      "Iteration 104, loss = 0.70084874\n",
      "Iteration 105, loss = 0.70087627\n",
      "Iteration 106, loss = 0.69855254\n",
      "Iteration 107, loss = 0.69419160\n",
      "Iteration 108, loss = 0.68988880\n",
      "Iteration 109, loss = 0.68848305\n",
      "Iteration 110, loss = 0.68471543\n",
      "Iteration 111, loss = 0.68636031\n",
      "Iteration 112, loss = 0.68617403\n",
      "Iteration 113, loss = 0.68615556\n",
      "Iteration 114, loss = 0.67752525\n",
      "Iteration 115, loss = 0.67937995\n",
      "Iteration 116, loss = 0.67378045\n",
      "Iteration 117, loss = 0.68245986\n",
      "Iteration 118, loss = 0.66617722\n",
      "Iteration 119, loss = 0.66495702\n",
      "Iteration 120, loss = 0.66268828\n",
      "Iteration 121, loss = 0.66805194\n",
      "Iteration 122, loss = 0.65717375\n",
      "Iteration 123, loss = 0.66148730\n",
      "Iteration 124, loss = 0.65944548\n",
      "Iteration 125, loss = 0.65562413\n",
      "Iteration 126, loss = 0.64708214\n",
      "Iteration 127, loss = 0.64904262\n",
      "Iteration 128, loss = 0.64721792\n",
      "Iteration 129, loss = 0.64186086\n",
      "Iteration 130, loss = 0.64347140\n",
      "Iteration 131, loss = 0.63950079\n",
      "Iteration 132, loss = 0.63883546\n",
      "Iteration 133, loss = 0.63505736\n",
      "Iteration 134, loss = 0.63132154\n",
      "Iteration 135, loss = 0.64979319\n",
      "Iteration 136, loss = 0.62881782\n",
      "Iteration 137, loss = 0.63196389\n",
      "Iteration 138, loss = 0.62850930\n",
      "Iteration 139, loss = 0.62169244\n",
      "Iteration 140, loss = 0.62063233\n",
      "Iteration 141, loss = 0.61507781\n",
      "Iteration 142, loss = 0.61963640\n",
      "Iteration 143, loss = 0.61938147\n",
      "Iteration 144, loss = 0.62049505\n",
      "Iteration 145, loss = 0.61682925\n",
      "Iteration 146, loss = 0.61352143\n",
      "Iteration 147, loss = 0.61420183\n",
      "Iteration 148, loss = 0.60983383\n",
      "Iteration 149, loss = 0.60248057\n",
      "Iteration 150, loss = 0.61136646\n",
      "Iteration 151, loss = 0.60773179\n",
      "Iteration 152, loss = 0.59952279\n",
      "Iteration 153, loss = 0.60895253\n",
      "Iteration 154, loss = 0.59850191\n",
      "Iteration 155, loss = 0.59731426\n",
      "Iteration 156, loss = 0.59691670\n",
      "Iteration 157, loss = 0.59576698\n",
      "Iteration 158, loss = 0.59154280\n",
      "Iteration 159, loss = 0.60102129\n",
      "Iteration 160, loss = 0.59066757\n",
      "Iteration 161, loss = 0.58896961\n",
      "Iteration 162, loss = 0.58942506\n",
      "Iteration 163, loss = 0.58559299\n",
      "Iteration 164, loss = 0.58656810\n",
      "Iteration 165, loss = 0.58629849\n",
      "Iteration 166, loss = 0.58369072\n",
      "Iteration 167, loss = 0.58334252\n",
      "Iteration 168, loss = 0.57503769\n",
      "Iteration 169, loss = 0.57976145\n",
      "Iteration 170, loss = 0.57422413\n",
      "Iteration 171, loss = 0.57634814\n",
      "Iteration 172, loss = 0.57758330\n",
      "Iteration 173, loss = 0.57349775\n",
      "Iteration 174, loss = 0.56994934\n",
      "Iteration 175, loss = 0.56807613\n",
      "Iteration 176, loss = 0.56787775\n",
      "Iteration 177, loss = 0.56896911\n",
      "Iteration 178, loss = 0.56303096\n",
      "Iteration 179, loss = 0.56400116\n",
      "Iteration 180, loss = 0.56770395\n",
      "Iteration 181, loss = 0.56129279\n",
      "Iteration 182, loss = 0.55551097\n",
      "Iteration 183, loss = 0.55948206\n",
      "Iteration 184, loss = 0.56165590\n",
      "Iteration 185, loss = 0.55471169\n",
      "Iteration 186, loss = 0.55536191\n",
      "Iteration 187, loss = 0.55290728\n",
      "Iteration 188, loss = 0.59722728\n",
      "Iteration 189, loss = 0.55350094\n",
      "Iteration 190, loss = 0.55142640\n",
      "Iteration 191, loss = 0.54571494\n",
      "Iteration 192, loss = 0.54059090\n",
      "Iteration 193, loss = 0.54776920\n",
      "Iteration 194, loss = 0.55010847\n",
      "Iteration 195, loss = 0.54687860\n",
      "Iteration 196, loss = 0.54093747\n",
      "Iteration 197, loss = 0.53881435\n",
      "Iteration 198, loss = 0.53723280\n",
      "Iteration 199, loss = 0.54065621\n",
      "Iteration 200, loss = 0.52871033\n",
      "Iteration 201, loss = 0.53252848\n",
      "Iteration 202, loss = 0.53420425\n",
      "Iteration 203, loss = 0.53684680\n",
      "Iteration 204, loss = 0.53551548\n",
      "Iteration 205, loss = 0.53306266\n",
      "Iteration 206, loss = 0.53339289\n",
      "Iteration 207, loss = 0.53213014\n",
      "Iteration 208, loss = 0.53383204\n",
      "Iteration 209, loss = 0.51919298\n",
      "Iteration 210, loss = 0.52069591\n",
      "Iteration 211, loss = 0.52232945\n",
      "Iteration 212, loss = 0.53801301\n",
      "Iteration 213, loss = 0.52928723\n",
      "Iteration 214, loss = 0.52176637\n",
      "Iteration 215, loss = 0.51712426\n",
      "Iteration 216, loss = 0.51473460\n",
      "Iteration 217, loss = 0.51396253\n",
      "Iteration 218, loss = 0.51468937\n",
      "Iteration 219, loss = 0.51923027\n",
      "Iteration 220, loss = 0.51234140\n",
      "Iteration 221, loss = 0.52166055\n",
      "Iteration 222, loss = 0.51415631\n",
      "Iteration 223, loss = 0.51787219\n",
      "Iteration 224, loss = 0.50701003\n",
      "Iteration 225, loss = 0.50758507\n",
      "Iteration 226, loss = 0.51226726\n",
      "Iteration 227, loss = 0.54240652\n",
      "Iteration 228, loss = 0.51184413\n",
      "Iteration 229, loss = 0.51247089\n",
      "Iteration 230, loss = 0.51003815\n",
      "Iteration 231, loss = 0.51039112\n",
      "Iteration 232, loss = 0.51076309\n",
      "Iteration 233, loss = 0.50963854\n",
      "Iteration 234, loss = 0.50247513\n",
      "Iteration 235, loss = 0.50558576\n",
      "Iteration 236, loss = 0.50497506\n",
      "Iteration 237, loss = 0.50218891\n",
      "Iteration 238, loss = 0.50320674\n",
      "Iteration 239, loss = 0.49945670\n",
      "Iteration 240, loss = 0.49558895\n",
      "Iteration 241, loss = 0.49927966\n",
      "Iteration 242, loss = 0.50096671\n",
      "Iteration 243, loss = 0.49432213\n",
      "Iteration 244, loss = 0.49799516\n",
      "Iteration 245, loss = 0.49293389\n",
      "Iteration 246, loss = 0.49223240\n",
      "Iteration 247, loss = 0.49006503\n",
      "Iteration 248, loss = 0.49062357\n",
      "Iteration 249, loss = 0.49439445\n",
      "Iteration 250, loss = 0.49615669\n",
      "Iteration 251, loss = 0.48370207\n",
      "Iteration 252, loss = 0.48355847\n",
      "Iteration 253, loss = 0.48519857\n",
      "Iteration 254, loss = 0.48536532\n",
      "Iteration 255, loss = 0.48682046\n",
      "Iteration 256, loss = 0.48591450\n",
      "Iteration 257, loss = 0.48597871\n",
      "Iteration 258, loss = 0.48137140\n",
      "Iteration 259, loss = 0.48040398\n",
      "Iteration 260, loss = 0.47706945\n",
      "Iteration 261, loss = 0.47442554\n",
      "Iteration 262, loss = 0.47571403\n",
      "Iteration 263, loss = 0.47568393\n",
      "Iteration 264, loss = 0.47894001\n",
      "Iteration 265, loss = 0.47718497\n",
      "Iteration 266, loss = 0.47187789\n",
      "Iteration 267, loss = 0.46765813\n",
      "Iteration 268, loss = 0.47257628\n",
      "Iteration 269, loss = 0.47246847\n",
      "Iteration 270, loss = 0.46865333\n",
      "Iteration 271, loss = 0.47819379\n",
      "Iteration 272, loss = 0.46908456\n",
      "Iteration 273, loss = 0.46696726\n",
      "Iteration 274, loss = 0.46573657\n",
      "Iteration 275, loss = 0.46506987\n",
      "Iteration 276, loss = 0.46805070\n",
      "Iteration 277, loss = 0.46232083\n",
      "Iteration 278, loss = 0.46734400\n",
      "Iteration 279, loss = 0.46062879\n",
      "Iteration 280, loss = 0.46748497\n",
      "Iteration 281, loss = 0.46876445\n",
      "Iteration 282, loss = 0.46457703\n",
      "Iteration 283, loss = 0.46263380\n",
      "Iteration 284, loss = 0.46274950\n",
      "Iteration 285, loss = 0.45733653\n",
      "Iteration 286, loss = 0.45978510\n",
      "Iteration 287, loss = 0.45430438\n",
      "Iteration 288, loss = 0.45285310\n",
      "Iteration 289, loss = 0.45875351\n",
      "Iteration 290, loss = 0.45636989\n",
      "Iteration 291, loss = 0.45168655\n",
      "Iteration 292, loss = 0.44999760\n",
      "Iteration 293, loss = 0.45386181\n",
      "Iteration 294, loss = 0.45597237\n",
      "Iteration 295, loss = 0.45528007\n",
      "Iteration 296, loss = 0.45111324\n",
      "Iteration 297, loss = 0.45455934\n",
      "Iteration 298, loss = 0.45153960\n",
      "Iteration 299, loss = 0.44770246\n",
      "Iteration 300, loss = 0.45495313\n",
      "Iteration 301, loss = 0.45100030\n",
      "Iteration 302, loss = 0.44856272\n",
      "Iteration 303, loss = 0.44802247\n",
      "Iteration 304, loss = 0.44695623\n",
      "Iteration 305, loss = 0.44573244\n",
      "Iteration 306, loss = 0.44328097\n",
      "Iteration 307, loss = 0.44358578\n",
      "Iteration 308, loss = 0.44910926\n",
      "Iteration 309, loss = 0.44478119\n",
      "Iteration 310, loss = 0.44400862\n",
      "Iteration 311, loss = 0.44071363\n",
      "Iteration 312, loss = 0.44309749\n",
      "Iteration 313, loss = 0.44607785\n",
      "Iteration 314, loss = 0.44747052\n",
      "Iteration 315, loss = 0.44010464\n",
      "Iteration 316, loss = 0.43688667\n",
      "Iteration 317, loss = 0.43695568\n",
      "Iteration 318, loss = 0.42822192\n",
      "Iteration 319, loss = 0.43797709\n",
      "Iteration 320, loss = 0.43902519\n",
      "Iteration 321, loss = 0.43542942\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iteration 322, loss = 0.43484326\n",
      "Iteration 323, loss = 0.43720449\n",
      "Iteration 324, loss = 0.43618007\n",
      "Iteration 325, loss = 0.43247529\n",
      "Iteration 326, loss = 0.43330422\n",
      "Iteration 327, loss = 0.43921922\n",
      "Iteration 328, loss = 0.42532324\n",
      "Iteration 329, loss = 0.43675487\n",
      "Iteration 330, loss = 0.43722031\n",
      "Iteration 331, loss = 0.43281675\n",
      "Iteration 332, loss = 0.43001339\n",
      "Iteration 333, loss = 0.43008946\n",
      "Iteration 334, loss = 0.43482417\n",
      "Iteration 335, loss = 0.42571897\n",
      "Iteration 336, loss = 0.42729157\n",
      "Iteration 337, loss = 0.42937985\n",
      "Iteration 338, loss = 0.42645548\n",
      "Iteration 339, loss = 0.42944174\n",
      "Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
      "1.8742336573335538\n",
      "Iteration 1, loss = 16.65242072\n",
      "Iteration 2, loss = 3.34846050\n",
      "Iteration 3, loss = 2.31841392\n",
      "Iteration 4, loss = 1.95713285\n",
      "Iteration 5, loss = 1.78287162\n",
      "Iteration 6, loss = 1.68777453\n",
      "Iteration 7, loss = 1.63294225\n",
      "Iteration 8, loss = 1.58822352\n",
      "Iteration 9, loss = 1.55654865\n",
      "Iteration 10, loss = 1.53244822\n",
      "Iteration 11, loss = 1.50856712\n",
      "Iteration 12, loss = 1.48672719\n",
      "Iteration 13, loss = 1.46998354\n",
      "Iteration 14, loss = 1.45662026\n",
      "Iteration 15, loss = 1.43893960\n",
      "Iteration 16, loss = 1.41411371\n",
      "Iteration 17, loss = 1.39057821\n",
      "Iteration 18, loss = 1.37534990\n",
      "Iteration 19, loss = 1.35500065\n",
      "Iteration 20, loss = 1.34181375\n",
      "Iteration 21, loss = 1.32114706\n",
      "Iteration 22, loss = 1.29950730\n",
      "Iteration 23, loss = 1.27898372\n",
      "Iteration 24, loss = 1.26306628\n",
      "Iteration 25, loss = 1.24377559\n",
      "Iteration 26, loss = 1.22262499\n",
      "Iteration 27, loss = 1.19837841\n",
      "Iteration 28, loss = 1.17383450\n",
      "Iteration 29, loss = 1.15611358\n",
      "Iteration 30, loss = 1.14010615\n",
      "Iteration 31, loss = 1.12248494\n",
      "Iteration 32, loss = 1.10064079\n",
      "Iteration 33, loss = 1.08576598\n",
      "Iteration 34, loss = 1.07524285\n",
      "Iteration 35, loss = 1.05760074\n",
      "Iteration 36, loss = 1.03848598\n",
      "Iteration 37, loss = 1.02307660\n",
      "Iteration 38, loss = 1.01220053\n",
      "Iteration 39, loss = 0.99876687\n",
      "Iteration 40, loss = 0.98984584\n",
      "Iteration 41, loss = 0.98113645\n",
      "Iteration 42, loss = 0.96211120\n",
      "Iteration 43, loss = 0.95620162\n",
      "Iteration 44, loss = 0.94662836\n",
      "Iteration 45, loss = 0.94071914\n",
      "Iteration 46, loss = 0.93642387\n",
      "Iteration 47, loss = 0.92899979\n",
      "Iteration 48, loss = 0.92040555\n",
      "Iteration 49, loss = 0.91008686\n",
      "Iteration 50, loss = 0.90757369\n",
      "Iteration 51, loss = 0.89645147\n",
      "Iteration 52, loss = 0.88899628\n",
      "Iteration 53, loss = 0.88413662\n",
      "Iteration 54, loss = 0.87742264\n",
      "Iteration 55, loss = 0.87059994\n",
      "Iteration 56, loss = 0.86516147\n",
      "Iteration 57, loss = 0.85517056\n",
      "Iteration 58, loss = 0.85853255\n",
      "Iteration 59, loss = 0.85480591\n",
      "Iteration 60, loss = 0.84611120\n",
      "Iteration 61, loss = 0.84327498\n",
      "Iteration 62, loss = 0.84161725\n",
      "Iteration 63, loss = 0.83027910\n",
      "Iteration 64, loss = 0.82498985\n",
      "Iteration 65, loss = 0.82246417\n",
      "Iteration 66, loss = 0.81835343\n",
      "Iteration 67, loss = 0.81349186\n",
      "Iteration 68, loss = 0.80778727\n",
      "Iteration 69, loss = 0.79671126\n",
      "Iteration 70, loss = 0.81367876\n",
      "Iteration 71, loss = 0.79197674\n",
      "Iteration 72, loss = 0.79208737\n",
      "Iteration 73, loss = 0.78659248\n",
      "Iteration 74, loss = 0.78193575\n",
      "Iteration 75, loss = 0.79829756\n",
      "Iteration 76, loss = 0.76937067\n",
      "Iteration 77, loss = 0.77061911\n",
      "Iteration 78, loss = 0.76999615\n",
      "Iteration 79, loss = 0.76235078\n",
      "Iteration 80, loss = 0.76803499\n",
      "Iteration 81, loss = 0.76026339\n",
      "Iteration 82, loss = 0.74754368\n",
      "Iteration 83, loss = 0.75316261\n",
      "Iteration 84, loss = 0.74664967\n",
      "Iteration 85, loss = 0.74603019\n",
      "Iteration 86, loss = 0.76567716\n",
      "Iteration 87, loss = 0.74187836\n",
      "Iteration 88, loss = 0.74705799\n",
      "Iteration 89, loss = 0.73328483\n",
      "Iteration 90, loss = 0.73228529\n",
      "Iteration 91, loss = 0.73271607\n",
      "Iteration 92, loss = 0.72399559\n",
      "Iteration 93, loss = 0.73429273\n",
      "Iteration 94, loss = 0.71707001\n",
      "Iteration 95, loss = 0.74006730\n",
      "Iteration 96, loss = 0.70669935\n",
      "Iteration 97, loss = 0.70627545\n",
      "Iteration 98, loss = 0.70445515\n",
      "Iteration 99, loss = 0.70318188\n",
      "Iteration 100, loss = 0.70704380\n",
      "Iteration 101, loss = 0.69811463\n",
      "Iteration 102, loss = 0.69046328\n",
      "Iteration 103, loss = 0.69129389\n",
      "Iteration 104, loss = 0.68712302\n",
      "Iteration 105, loss = 0.69390563\n",
      "Iteration 106, loss = 0.68951996\n",
      "Iteration 107, loss = 0.68246928\n",
      "Iteration 108, loss = 0.67658895\n",
      "Iteration 109, loss = 0.67506379\n",
      "Iteration 110, loss = 0.67391089\n",
      "Iteration 111, loss = 0.67709757\n",
      "Iteration 112, loss = 0.67614115\n",
      "Iteration 113, loss = 0.67387049\n",
      "Iteration 114, loss = 0.66436307\n",
      "Iteration 115, loss = 0.66643419\n",
      "Iteration 116, loss = 0.66249548\n",
      "Iteration 117, loss = 0.65964580\n",
      "Iteration 118, loss = 0.65307916\n",
      "Iteration 119, loss = 0.65611258\n",
      "Iteration 120, loss = 0.65144703\n",
      "Iteration 121, loss = 0.66039859\n",
      "Iteration 122, loss = 0.64720897\n",
      "Iteration 123, loss = 0.64921265\n",
      "Iteration 124, loss = 0.64744432\n",
      "Iteration 125, loss = 0.64336971\n",
      "Iteration 126, loss = 0.64003772\n",
      "Iteration 127, loss = 0.63925086\n",
      "Iteration 128, loss = 0.63700694\n",
      "Iteration 129, loss = 0.62796367\n",
      "Iteration 130, loss = 0.62957750\n",
      "Iteration 131, loss = 0.62621203\n",
      "Iteration 132, loss = 0.62781368\n",
      "Iteration 133, loss = 0.62833558\n",
      "Iteration 134, loss = 0.62456292\n",
      "Iteration 135, loss = 0.64422411\n",
      "Iteration 136, loss = 0.61861829\n",
      "Iteration 137, loss = 0.61769410\n",
      "Iteration 138, loss = 0.61918916\n",
      "Iteration 139, loss = 0.61450340\n",
      "Iteration 140, loss = 0.60999477\n",
      "Iteration 141, loss = 0.60953288\n",
      "Iteration 142, loss = 0.61902086\n",
      "Iteration 143, loss = 0.61260404\n",
      "Iteration 144, loss = 0.63061595\n",
      "Iteration 145, loss = 0.60541698\n",
      "Iteration 146, loss = 0.60337387\n",
      "Iteration 147, loss = 0.60553496\n",
      "Iteration 148, loss = 0.60206858\n",
      "Iteration 149, loss = 0.59663670\n",
      "Iteration 150, loss = 0.60223378\n",
      "Iteration 151, loss = 0.59799301\n",
      "Iteration 152, loss = 0.59692192\n",
      "Iteration 153, loss = 0.59828365\n",
      "Iteration 154, loss = 0.58898520\n",
      "Iteration 155, loss = 0.58790801\n",
      "Iteration 156, loss = 0.59241461\n",
      "Iteration 157, loss = 0.58851201\n",
      "Iteration 158, loss = 0.58771413\n",
      "Iteration 159, loss = 0.58416649\n",
      "Iteration 160, loss = 0.58009693\n",
      "Iteration 161, loss = 0.57970065\n",
      "Iteration 162, loss = 0.58356615\n",
      "Iteration 163, loss = 0.57766303\n",
      "Iteration 164, loss = 0.57476485\n",
      "Iteration 165, loss = 0.58946327\n",
      "Iteration 166, loss = 0.57781278\n",
      "Iteration 167, loss = 0.57041425\n",
      "Iteration 168, loss = 0.57572832\n",
      "Iteration 169, loss = 0.57639858\n",
      "Iteration 170, loss = 0.56538826\n",
      "Iteration 171, loss = 0.56626368\n",
      "Iteration 172, loss = 0.59257569\n",
      "Iteration 173, loss = 0.56312837\n",
      "Iteration 174, loss = 0.56237753\n",
      "Iteration 175, loss = 0.56011025\n",
      "Iteration 176, loss = 0.56207260\n",
      "Iteration 177, loss = 0.56200629\n",
      "Iteration 178, loss = 0.55815742\n",
      "Iteration 179, loss = 0.55839268\n",
      "Iteration 180, loss = 0.56550648\n",
      "Iteration 181, loss = 0.55168780\n",
      "Iteration 182, loss = 0.55025789\n",
      "Iteration 183, loss = 0.54959719\n",
      "Iteration 184, loss = 0.55757810\n",
      "Iteration 185, loss = 0.54519823\n",
      "Iteration 186, loss = 0.54760957\n",
      "Iteration 187, loss = 0.54251498\n",
      "Iteration 188, loss = 0.57079212\n",
      "Iteration 189, loss = 0.54539883\n",
      "Iteration 190, loss = 0.54486578\n",
      "Iteration 191, loss = 0.53640038\n",
      "Iteration 192, loss = 0.53521804\n",
      "Iteration 193, loss = 0.53585838\n",
      "Iteration 194, loss = 0.53701302\n",
      "Iteration 195, loss = 0.53852090\n",
      "Iteration 196, loss = 0.53592128\n",
      "Iteration 197, loss = 0.53692625\n",
      "Iteration 198, loss = 0.53482888\n",
      "Iteration 199, loss = 0.53068117\n",
      "Iteration 200, loss = 0.52970976\n",
      "Iteration 201, loss = 0.52926200\n",
      "Iteration 202, loss = 0.52819344\n",
      "Iteration 203, loss = 0.53123951\n",
      "Iteration 204, loss = 0.52801284\n",
      "Iteration 205, loss = 0.52704989\n",
      "Iteration 206, loss = 0.52730506\n",
      "Iteration 207, loss = 0.52004820\n",
      "Iteration 208, loss = 0.52522294\n",
      "Iteration 209, loss = 0.51292563\n",
      "Iteration 210, loss = 0.51302202\n",
      "Iteration 211, loss = 0.51493875\n",
      "Iteration 212, loss = 0.51432239\n",
      "Iteration 213, loss = 0.51302602\n",
      "Iteration 214, loss = 0.51252335\n",
      "Iteration 215, loss = 0.50826696\n",
      "Iteration 216, loss = 0.50461850\n",
      "Iteration 217, loss = 0.50620319\n",
      "Iteration 218, loss = 0.50375647\n",
      "Iteration 219, loss = 0.50595316\n",
      "Iteration 220, loss = 0.50398923\n",
      "Iteration 221, loss = 0.51209142\n",
      "Iteration 222, loss = 0.50274220\n",
      "Iteration 223, loss = 0.50726024\n",
      "Iteration 224, loss = 0.49901669\n",
      "Iteration 225, loss = 0.51054093\n",
      "Iteration 226, loss = 0.50152088\n",
      "Iteration 227, loss = 0.51952902\n",
      "Iteration 228, loss = 0.49541419\n",
      "Iteration 229, loss = 0.49957225\n",
      "Iteration 230, loss = 0.49771572\n",
      "Iteration 231, loss = 0.50418557\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iteration 232, loss = 0.49784338\n",
      "Iteration 233, loss = 0.49508670\n",
      "Iteration 234, loss = 0.49376446\n",
      "Iteration 235, loss = 0.49272003\n",
      "Iteration 236, loss = 0.49136818\n",
      "Iteration 237, loss = 0.49132347\n",
      "Iteration 238, loss = 0.48930415\n",
      "Iteration 239, loss = 0.48998796\n",
      "Iteration 240, loss = 0.48381837\n",
      "Iteration 241, loss = 0.48884039\n",
      "Iteration 242, loss = 0.49196398\n",
      "Iteration 243, loss = 0.48003704\n",
      "Iteration 244, loss = 0.48386253\n",
      "Iteration 245, loss = 0.48163485\n",
      "Iteration 246, loss = 0.47970016\n",
      "Iteration 247, loss = 0.47392219\n",
      "Iteration 248, loss = 0.47859324\n",
      "Iteration 249, loss = 0.48055485\n",
      "Iteration 250, loss = 0.48074206\n",
      "Iteration 251, loss = 0.47387355\n",
      "Iteration 252, loss = 0.47231732\n",
      "Iteration 253, loss = 0.46972725\n",
      "Iteration 254, loss = 0.47201194\n",
      "Iteration 255, loss = 0.46775439\n",
      "Iteration 256, loss = 0.47185949\n",
      "Iteration 257, loss = 0.46688363\n",
      "Iteration 258, loss = 0.46409588\n",
      "Iteration 259, loss = 0.46764983\n",
      "Iteration 260, loss = 0.46527313\n",
      "Iteration 261, loss = 0.46198203\n",
      "Iteration 262, loss = 0.46360351\n",
      "Iteration 263, loss = 0.45976687\n",
      "Iteration 264, loss = 0.46608371\n",
      "Iteration 265, loss = 0.46258941\n",
      "Iteration 266, loss = 0.46264438\n",
      "Iteration 267, loss = 0.45462411\n",
      "Iteration 268, loss = 0.45717338\n",
      "Iteration 269, loss = 0.45893186\n",
      "Iteration 270, loss = 0.45624617\n",
      "Iteration 271, loss = 0.47301812\n",
      "Iteration 272, loss = 0.45585478\n",
      "Iteration 273, loss = 0.45427425\n",
      "Iteration 274, loss = 0.45468310\n",
      "Iteration 275, loss = 0.45630782\n",
      "Iteration 276, loss = 0.45990820\n",
      "Iteration 277, loss = 0.45012312\n",
      "Iteration 278, loss = 0.45472188\n",
      "Iteration 279, loss = 0.45169603\n",
      "Iteration 280, loss = 0.45282274\n",
      "Iteration 281, loss = 0.45670705\n",
      "Iteration 282, loss = 0.45072240\n",
      "Iteration 283, loss = 0.45083821\n",
      "Iteration 284, loss = 0.44628561\n",
      "Iteration 285, loss = 0.44390305\n",
      "Iteration 286, loss = 0.44792501\n",
      "Iteration 287, loss = 0.44203088\n",
      "Iteration 288, loss = 0.44322593\n",
      "Iteration 289, loss = 0.44233314\n",
      "Iteration 290, loss = 0.44154235\n",
      "Iteration 291, loss = 0.43696279\n",
      "Iteration 292, loss = 0.43794318\n",
      "Iteration 293, loss = 0.44140818\n",
      "Iteration 294, loss = 0.44247557\n",
      "Iteration 295, loss = 0.43972781\n",
      "Iteration 296, loss = 0.43548800\n",
      "Iteration 297, loss = 0.44042667\n",
      "Iteration 298, loss = 0.43537798\n",
      "Iteration 299, loss = 0.43404759\n",
      "Iteration 300, loss = 0.43610679\n",
      "Iteration 301, loss = 0.44038711\n",
      "Iteration 302, loss = 0.43647828\n",
      "Iteration 303, loss = 0.43277376\n",
      "Iteration 304, loss = 0.43165258\n",
      "Iteration 305, loss = 0.43379433\n",
      "Iteration 306, loss = 0.42585391\n",
      "Iteration 307, loss = 0.42871387\n",
      "Iteration 308, loss = 0.42962805\n",
      "Iteration 309, loss = 0.42857399\n",
      "Iteration 310, loss = 0.42457927\n",
      "Iteration 311, loss = 0.42875773\n",
      "Iteration 312, loss = 0.42910418\n",
      "Iteration 313, loss = 0.42612976\n",
      "Iteration 314, loss = 0.42745952\n",
      "Iteration 315, loss = 0.42676559\n",
      "Iteration 316, loss = 0.42044468\n",
      "Iteration 317, loss = 0.42392528\n",
      "Iteration 318, loss = 0.41623950\n",
      "Iteration 319, loss = 0.43465541\n",
      "Iteration 320, loss = 0.42500685\n",
      "Iteration 321, loss = 0.42454351\n",
      "Iteration 322, loss = 0.42202294\n",
      "Iteration 323, loss = 0.42436673\n",
      "Iteration 324, loss = 0.42150239\n",
      "Iteration 325, loss = 0.41690075\n",
      "Iteration 326, loss = 0.41747634\n",
      "Iteration 327, loss = 0.41783693\n",
      "Iteration 328, loss = 0.40911043\n",
      "Iteration 329, loss = 0.41832221\n",
      "Iteration 330, loss = 0.42025671\n",
      "Iteration 331, loss = 0.41441633\n",
      "Iteration 332, loss = 0.41006461\n",
      "Iteration 333, loss = 0.41195962\n",
      "Iteration 334, loss = 0.42548687\n",
      "Iteration 335, loss = 0.40755337\n",
      "Iteration 336, loss = 0.41237294\n",
      "Iteration 337, loss = 0.41987606\n",
      "Iteration 338, loss = 0.41780225\n",
      "Iteration 339, loss = 0.41582582\n",
      "Iteration 340, loss = 0.40822404\n",
      "Iteration 341, loss = 0.42652638\n",
      "Iteration 342, loss = 0.40991929\n",
      "Iteration 343, loss = 0.41618265\n",
      "Iteration 344, loss = 0.40559353\n",
      "Iteration 345, loss = 0.40608315\n",
      "Iteration 346, loss = 0.41163454\n",
      "Iteration 347, loss = 0.40204969\n",
      "Iteration 348, loss = 0.40941374\n",
      "Iteration 349, loss = 0.40873054\n",
      "Iteration 350, loss = 0.40611926\n",
      "Iteration 351, loss = 0.40579614\n",
      "Iteration 352, loss = 0.39922173\n",
      "Iteration 353, loss = 0.40073851\n",
      "Iteration 354, loss = 0.41372510\n",
      "Iteration 355, loss = 0.39928253\n",
      "Iteration 356, loss = 0.40348496\n",
      "Iteration 357, loss = 0.40886974\n",
      "Iteration 358, loss = 0.40451973\n",
      "Iteration 359, loss = 0.40224932\n",
      "Iteration 360, loss = 0.39323460\n",
      "Iteration 361, loss = 0.39831765\n",
      "Iteration 362, loss = 0.40841619\n",
      "Iteration 363, loss = 0.39933025\n",
      "Iteration 364, loss = 0.39592241\n",
      "Iteration 365, loss = 0.39779543\n",
      "Iteration 366, loss = 0.39527120\n",
      "Iteration 367, loss = 0.39355662\n",
      "Iteration 368, loss = 0.40135294\n",
      "Iteration 369, loss = 0.39735776\n",
      "Iteration 370, loss = 0.40671706\n",
      "Iteration 371, loss = 0.39484951\n",
      "Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
      "2.0601817475073454\n",
      "Iteration 1, loss = 16.64897717\n",
      "Iteration 2, loss = 3.36593159\n",
      "Iteration 3, loss = 2.32034234\n",
      "Iteration 4, loss = 1.95478350\n",
      "Iteration 5, loss = 1.78422092\n",
      "Iteration 6, loss = 1.69000264\n",
      "Iteration 7, loss = 1.64001443\n",
      "Iteration 8, loss = 1.59238069\n",
      "Iteration 9, loss = 1.56156059\n",
      "Iteration 10, loss = 1.53735007\n",
      "Iteration 11, loss = 1.51427013\n",
      "Iteration 12, loss = 1.49474343\n",
      "Iteration 13, loss = 1.47969163\n",
      "Iteration 14, loss = 1.46164482\n",
      "Iteration 15, loss = 1.44356983\n",
      "Iteration 16, loss = 1.42263959\n",
      "Iteration 17, loss = 1.40467947\n",
      "Iteration 18, loss = 1.38337160\n",
      "Iteration 19, loss = 1.36854942\n",
      "Iteration 20, loss = 1.35350833\n",
      "Iteration 21, loss = 1.33506867\n",
      "Iteration 22, loss = 1.31349722\n",
      "Iteration 23, loss = 1.29675661\n",
      "Iteration 24, loss = 1.27687015\n",
      "Iteration 25, loss = 1.26004814\n",
      "Iteration 26, loss = 1.24123059\n",
      "Iteration 27, loss = 1.22052648\n",
      "Iteration 28, loss = 1.19397225\n",
      "Iteration 29, loss = 1.17663961\n",
      "Iteration 30, loss = 1.16629236\n",
      "Iteration 31, loss = 1.14172885\n",
      "Iteration 32, loss = 1.12415861\n",
      "Iteration 33, loss = 1.11013899\n",
      "Iteration 34, loss = 1.09433001\n",
      "Iteration 35, loss = 1.08000986\n",
      "Iteration 36, loss = 1.05643382\n",
      "Iteration 37, loss = 1.04857697\n",
      "Iteration 38, loss = 1.03193417\n",
      "Iteration 39, loss = 1.02250992\n",
      "Iteration 40, loss = 1.01075174\n",
      "Iteration 41, loss = 0.99855818\n",
      "Iteration 42, loss = 0.98152337\n",
      "Iteration 43, loss = 0.97487830\n",
      "Iteration 44, loss = 0.96284322\n",
      "Iteration 45, loss = 0.95414965\n",
      "Iteration 46, loss = 0.95078593\n",
      "Iteration 47, loss = 0.93975365\n",
      "Iteration 48, loss = 0.93577813\n",
      "Iteration 49, loss = 0.92548807\n",
      "Iteration 50, loss = 0.92160746\n",
      "Iteration 51, loss = 0.91298558\n",
      "Iteration 52, loss = 0.90559954\n",
      "Iteration 53, loss = 0.90305867\n",
      "Iteration 54, loss = 0.88595998\n",
      "Iteration 55, loss = 0.88109677\n",
      "Iteration 56, loss = 0.87503929\n",
      "Iteration 57, loss = 0.87066244\n",
      "Iteration 58, loss = 0.86725576\n",
      "Iteration 59, loss = 0.86374801\n",
      "Iteration 60, loss = 0.86134009\n",
      "Iteration 61, loss = 0.85707807\n",
      "Iteration 62, loss = 0.84842279\n",
      "Iteration 63, loss = 0.84267062\n",
      "Iteration 64, loss = 0.83861151\n",
      "Iteration 65, loss = 0.83968142\n",
      "Iteration 66, loss = 0.83100689\n",
      "Iteration 67, loss = 0.82088094\n",
      "Iteration 68, loss = 0.81681238\n",
      "Iteration 69, loss = 0.81511179\n",
      "Iteration 70, loss = 0.81654685\n",
      "Iteration 71, loss = 0.80004630\n",
      "Iteration 72, loss = 0.80995246\n",
      "Iteration 73, loss = 0.79796118\n",
      "Iteration 74, loss = 0.79561346\n",
      "Iteration 75, loss = 0.80053331\n",
      "Iteration 76, loss = 0.78042880\n",
      "Iteration 77, loss = 0.78224701\n",
      "Iteration 78, loss = 0.79864557\n",
      "Iteration 79, loss = 0.77705798\n",
      "Iteration 80, loss = 0.77672831\n",
      "Iteration 81, loss = 0.77181996\n",
      "Iteration 82, loss = 0.76245021\n",
      "Iteration 83, loss = 0.75695370\n",
      "Iteration 84, loss = 0.75822321\n",
      "Iteration 85, loss = 0.75775834\n",
      "Iteration 86, loss = 0.76003613\n",
      "Iteration 87, loss = 0.75157511\n",
      "Iteration 88, loss = 0.76450346\n",
      "Iteration 89, loss = 0.74290195\n",
      "Iteration 90, loss = 0.73916124\n",
      "Iteration 91, loss = 0.74085763\n",
      "Iteration 92, loss = 0.73502704\n",
      "Iteration 93, loss = 0.74033840\n",
      "Iteration 94, loss = 0.72947288\n",
      "Iteration 95, loss = 0.72998232\n",
      "Iteration 96, loss = 0.71767328\n",
      "Iteration 97, loss = 0.72676473\n",
      "Iteration 98, loss = 0.72266413\n",
      "Iteration 99, loss = 0.71395060\n",
      "Iteration 100, loss = 0.71882636\n",
      "Iteration 101, loss = 0.71135058\n",
      "Iteration 102, loss = 0.70714428\n",
      "Iteration 103, loss = 0.70294075\n",
      "Iteration 104, loss = 0.70333291\n",
      "Iteration 105, loss = 0.70427902\n",
      "Iteration 106, loss = 0.70546918\n",
      "Iteration 107, loss = 0.69684517\n",
      "Iteration 108, loss = 0.69278470\n",
      "Iteration 109, loss = 0.69188990\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iteration 110, loss = 0.68890146\n",
      "Iteration 111, loss = 0.68863842\n",
      "Iteration 112, loss = 0.69992406\n",
      "Iteration 113, loss = 0.68751342\n",
      "Iteration 114, loss = 0.67757207\n",
      "Iteration 115, loss = 0.68175744\n",
      "Iteration 116, loss = 0.67585942\n",
      "Iteration 117, loss = 0.69589431\n",
      "Iteration 118, loss = 0.68680516\n",
      "Iteration 119, loss = 0.69713504\n",
      "Iteration 120, loss = 0.67251546\n",
      "Iteration 121, loss = 0.67887034\n",
      "Iteration 122, loss = 0.66858488\n",
      "Iteration 123, loss = 0.66080133\n",
      "Iteration 124, loss = 0.66742365\n",
      "Iteration 125, loss = 0.66241052\n",
      "Iteration 126, loss = 0.65910721\n",
      "Iteration 127, loss = 0.65922467\n",
      "Iteration 128, loss = 0.65404778\n",
      "Iteration 129, loss = 0.64986390\n",
      "Iteration 130, loss = 0.64183725\n",
      "Iteration 131, loss = 0.64581548\n",
      "Iteration 132, loss = 0.64548668\n",
      "Iteration 133, loss = 0.64299558\n",
      "Iteration 134, loss = 0.63755053\n",
      "Iteration 135, loss = 0.65859479\n",
      "Iteration 136, loss = 0.63588499\n",
      "Iteration 137, loss = 0.63433703\n",
      "Iteration 138, loss = 0.63093810\n",
      "Iteration 139, loss = 0.64265829\n",
      "Iteration 140, loss = 0.63026727\n",
      "Iteration 141, loss = 0.62460347\n",
      "Iteration 142, loss = 0.63188026\n",
      "Iteration 143, loss = 0.62377959\n",
      "Iteration 144, loss = 0.64055153\n",
      "Iteration 145, loss = 0.62501637\n",
      "Iteration 146, loss = 0.62192719\n",
      "Iteration 147, loss = 0.62175742\n",
      "Iteration 148, loss = 0.61888002\n",
      "Iteration 149, loss = 0.61318485\n",
      "Iteration 150, loss = 0.61311739\n",
      "Iteration 151, loss = 0.61874117\n",
      "Iteration 152, loss = 0.61535200\n",
      "Iteration 153, loss = 0.60973308\n",
      "Iteration 154, loss = 0.60937650\n",
      "Iteration 155, loss = 0.60191280\n",
      "Iteration 156, loss = 0.60036502\n",
      "Iteration 157, loss = 0.61011043\n",
      "Iteration 158, loss = 0.60516793\n",
      "Iteration 159, loss = 0.61093285\n",
      "Iteration 160, loss = 0.60025436\n",
      "Iteration 161, loss = 0.59284668\n",
      "Iteration 162, loss = 0.59674378\n",
      "Iteration 163, loss = 0.58977813\n",
      "Iteration 164, loss = 0.58723669\n",
      "Iteration 165, loss = 0.60231604\n",
      "Iteration 166, loss = 0.59858386\n",
      "Iteration 167, loss = 0.58977586\n",
      "Iteration 168, loss = 0.60033135\n",
      "Iteration 169, loss = 0.59702500\n",
      "Iteration 170, loss = 0.58366548\n",
      "Iteration 171, loss = 0.57988677\n",
      "Iteration 172, loss = 0.58356045\n",
      "Iteration 173, loss = 0.58013823\n",
      "Iteration 174, loss = 0.58062481\n",
      "Iteration 175, loss = 0.57501364\n",
      "Iteration 176, loss = 0.57918694\n",
      "Iteration 177, loss = 0.57762500\n",
      "Iteration 178, loss = 0.56784994\n",
      "Iteration 179, loss = 0.57407578\n",
      "Iteration 180, loss = 0.57457667\n",
      "Iteration 181, loss = 0.56623018\n",
      "Iteration 182, loss = 0.56397188\n",
      "Iteration 183, loss = 0.57129394\n",
      "Iteration 184, loss = 0.57633512\n",
      "Iteration 185, loss = 0.55966224\n",
      "Iteration 186, loss = 0.56652181\n",
      "Iteration 187, loss = 0.56427890\n",
      "Iteration 188, loss = 0.58098487\n",
      "Iteration 189, loss = 0.56008102\n",
      "Iteration 190, loss = 0.56935223\n",
      "Iteration 191, loss = 0.55291606\n",
      "Iteration 192, loss = 0.55415373\n",
      "Iteration 193, loss = 0.55692199\n",
      "Iteration 194, loss = 0.56268661\n",
      "Iteration 195, loss = 0.55266134\n",
      "Iteration 196, loss = 0.55034481\n",
      "Iteration 197, loss = 0.55471825\n",
      "Iteration 198, loss = 0.55041882\n",
      "Iteration 199, loss = 0.54852862\n",
      "Iteration 200, loss = 0.54966466\n",
      "Iteration 201, loss = 0.54612934\n",
      "Iteration 202, loss = 0.54243253\n",
      "Iteration 203, loss = 0.54955675\n",
      "Iteration 204, loss = 0.54209180\n",
      "Iteration 205, loss = 0.54022833\n",
      "Iteration 206, loss = 0.54557746\n",
      "Iteration 207, loss = 0.53665243\n",
      "Iteration 208, loss = 0.53910822\n",
      "Iteration 209, loss = 0.53733472\n",
      "Iteration 210, loss = 0.53150571\n",
      "Iteration 211, loss = 0.53818416\n",
      "Iteration 212, loss = 0.53137450\n",
      "Iteration 213, loss = 0.52935164\n",
      "Iteration 214, loss = 0.53300352\n",
      "Iteration 215, loss = 0.52760895\n",
      "Iteration 216, loss = 0.52451106\n",
      "Iteration 217, loss = 0.52626941\n",
      "Iteration 218, loss = 0.52079246\n",
      "Iteration 219, loss = 0.52371270\n",
      "Iteration 220, loss = 0.52241696\n",
      "Iteration 221, loss = 0.52331296\n",
      "Iteration 222, loss = 0.52606926\n",
      "Iteration 223, loss = 0.52885122\n",
      "Iteration 224, loss = 0.52005214\n",
      "Iteration 225, loss = 0.52519228\n",
      "Iteration 226, loss = 0.51855721\n",
      "Iteration 227, loss = 0.54904444\n",
      "Iteration 228, loss = 0.51676173\n",
      "Iteration 229, loss = 0.52126221\n",
      "Iteration 230, loss = 0.52110738\n",
      "Iteration 231, loss = 0.51856861\n",
      "Iteration 232, loss = 0.51611998\n",
      "Iteration 233, loss = 0.51487896\n",
      "Iteration 234, loss = 0.51714640\n",
      "Iteration 235, loss = 0.51597383\n",
      "Iteration 236, loss = 0.51297953\n",
      "Iteration 237, loss = 0.51134370\n",
      "Iteration 238, loss = 0.51420470\n",
      "Iteration 239, loss = 0.50759291\n",
      "Iteration 240, loss = 0.50572681\n",
      "Iteration 241, loss = 0.50321601\n",
      "Iteration 242, loss = 0.50782074\n",
      "Iteration 243, loss = 0.50108206\n",
      "Iteration 244, loss = 0.50353479\n",
      "Iteration 245, loss = 0.50093891\n",
      "Iteration 246, loss = 0.50141296\n",
      "Iteration 247, loss = 0.50268399\n",
      "Iteration 248, loss = 0.49686368\n",
      "Iteration 249, loss = 0.50388058\n",
      "Iteration 250, loss = 0.50815541\n",
      "Iteration 251, loss = 0.49501254\n",
      "Iteration 252, loss = 0.49611377\n",
      "Iteration 253, loss = 0.49362951\n",
      "Iteration 254, loss = 0.49590765\n",
      "Iteration 255, loss = 0.49165047\n",
      "Iteration 256, loss = 0.49681883\n",
      "Iteration 257, loss = 0.49007228\n",
      "Iteration 258, loss = 0.49138904\n",
      "Iteration 259, loss = 0.49079459\n",
      "Iteration 260, loss = 0.49255323\n",
      "Iteration 261, loss = 0.48872979\n",
      "Iteration 262, loss = 0.48338458\n",
      "Iteration 263, loss = 0.48754314\n",
      "Iteration 264, loss = 0.49007092\n",
      "Iteration 265, loss = 0.48654645\n",
      "Iteration 266, loss = 0.48465153\n",
      "Iteration 267, loss = 0.47666908\n",
      "Iteration 268, loss = 0.47849554\n",
      "Iteration 269, loss = 0.48716717\n",
      "Iteration 270, loss = 0.48388512\n",
      "Iteration 271, loss = 0.48659867\n",
      "Iteration 272, loss = 0.47939701\n",
      "Iteration 273, loss = 0.47920439\n",
      "Iteration 274, loss = 0.47698118\n",
      "Iteration 275, loss = 0.47873905\n",
      "Iteration 276, loss = 0.47858944\n",
      "Iteration 277, loss = 0.47368830\n",
      "Iteration 278, loss = 0.47435200\n",
      "Iteration 279, loss = 0.47504221\n",
      "Iteration 280, loss = 0.47824278\n",
      "Iteration 281, loss = 0.47536724\n",
      "Iteration 282, loss = 0.46548765\n",
      "Iteration 283, loss = 0.46748439\n",
      "Iteration 284, loss = 0.46884400\n",
      "Iteration 285, loss = 0.47208462\n",
      "Iteration 286, loss = 0.47140599\n",
      "Iteration 287, loss = 0.46515056\n",
      "Iteration 288, loss = 0.46750721\n",
      "Iteration 289, loss = 0.46383306\n",
      "Iteration 290, loss = 0.46420403\n",
      "Iteration 291, loss = 0.46095282\n",
      "Iteration 292, loss = 0.46505796\n",
      "Iteration 293, loss = 0.46801566\n",
      "Iteration 294, loss = 0.46111720\n",
      "Iteration 295, loss = 0.46538501\n",
      "Iteration 296, loss = 0.46106856\n",
      "Iteration 297, loss = 0.46094327\n",
      "Iteration 298, loss = 0.45752843\n",
      "Iteration 299, loss = 0.45702370\n",
      "Iteration 300, loss = 0.45773545\n",
      "Iteration 301, loss = 0.46299217\n",
      "Iteration 302, loss = 0.46171803\n",
      "Iteration 303, loss = 0.45538492\n",
      "Iteration 304, loss = 0.45520439\n",
      "Iteration 305, loss = 0.45416116\n",
      "Iteration 306, loss = 0.45411643\n",
      "Iteration 307, loss = 0.45500620\n",
      "Iteration 308, loss = 0.45493944\n",
      "Iteration 309, loss = 0.45449169\n",
      "Iteration 310, loss = 0.45155689\n",
      "Iteration 311, loss = 0.45084681\n",
      "Iteration 312, loss = 0.44918137\n",
      "Iteration 313, loss = 0.45095929\n",
      "Iteration 314, loss = 0.45138382\n",
      "Iteration 315, loss = 0.45143192\n",
      "Iteration 316, loss = 0.44465134\n",
      "Iteration 317, loss = 0.45025984\n",
      "Iteration 318, loss = 0.45444854\n",
      "Iteration 319, loss = 0.45621856\n",
      "Iteration 320, loss = 0.45320227\n",
      "Iteration 321, loss = 0.45006385\n",
      "Iteration 322, loss = 0.44700831\n",
      "Iteration 323, loss = 0.44952157\n",
      "Iteration 324, loss = 0.44966480\n",
      "Iteration 325, loss = 0.44172947\n",
      "Iteration 326, loss = 0.44144089\n",
      "Iteration 327, loss = 0.44863774\n",
      "Iteration 328, loss = 0.43876884\n",
      "Iteration 329, loss = 0.43793260\n",
      "Iteration 330, loss = 0.44173578\n",
      "Iteration 331, loss = 0.43282913\n",
      "Iteration 332, loss = 0.43611917\n",
      "Iteration 333, loss = 0.43469484\n",
      "Iteration 334, loss = 0.43541302\n",
      "Iteration 335, loss = 0.43639087\n",
      "Iteration 336, loss = 0.43807910\n",
      "Iteration 337, loss = 0.43934195\n",
      "Iteration 338, loss = 0.43542339\n",
      "Iteration 339, loss = 0.43721181\n",
      "Iteration 340, loss = 0.43025943\n",
      "Iteration 341, loss = 0.44639424\n",
      "Iteration 342, loss = 0.43588817\n",
      "Iteration 343, loss = 0.43180823\n",
      "Iteration 344, loss = 0.42798059\n",
      "Iteration 345, loss = 0.43036961\n",
      "Iteration 346, loss = 0.43315829\n",
      "Iteration 347, loss = 0.42368754\n",
      "Iteration 348, loss = 0.42774259\n",
      "Iteration 349, loss = 0.43263388\n",
      "Iteration 350, loss = 0.42592913\n",
      "Iteration 351, loss = 0.43089572\n",
      "Iteration 352, loss = 0.42739853\n",
      "Iteration 353, loss = 0.42687563\n",
      "Iteration 354, loss = 0.43810430\n",
      "Iteration 355, loss = 0.42114926\n",
      "Iteration 356, loss = 0.42656142\n",
      "Iteration 357, loss = 0.42562720\n",
      "Iteration 358, loss = 0.42932340\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iteration 359, loss = 0.42551154\n",
      "Iteration 360, loss = 0.42028302\n",
      "Iteration 361, loss = 0.41993439\n",
      "Iteration 362, loss = 0.42490295\n",
      "Iteration 363, loss = 0.42146771\n",
      "Iteration 364, loss = 0.42396181\n",
      "Iteration 365, loss = 0.42580058\n",
      "Iteration 366, loss = 0.42068661\n",
      "Iteration 367, loss = 0.41528527\n",
      "Iteration 368, loss = 0.42154424\n",
      "Iteration 369, loss = 0.42100727\n",
      "Iteration 370, loss = 0.42937718\n",
      "Iteration 371, loss = 0.41868906\n",
      "Iteration 372, loss = 0.41365890\n",
      "Iteration 373, loss = 0.41353088\n",
      "Iteration 374, loss = 0.42674990\n",
      "Iteration 375, loss = 0.42165367\n",
      "Iteration 376, loss = 0.41718966\n",
      "Iteration 377, loss = 0.41588859\n",
      "Iteration 378, loss = 0.41420421\n",
      "Iteration 379, loss = 0.42051608\n",
      "Iteration 380, loss = 0.41015139\n",
      "Iteration 381, loss = 0.40894231\n",
      "Iteration 382, loss = 0.41300358\n",
      "Iteration 383, loss = 0.41979270\n",
      "Iteration 384, loss = 0.44428613\n",
      "Iteration 385, loss = 0.41708739\n",
      "Iteration 386, loss = 0.41906105\n",
      "Iteration 387, loss = 0.41164476\n",
      "Iteration 388, loss = 0.41487483\n",
      "Iteration 389, loss = 0.40988984\n",
      "Iteration 390, loss = 0.40795770\n",
      "Iteration 391, loss = 0.41270890\n",
      "Iteration 392, loss = 0.41069719\n",
      "Iteration 393, loss = 0.40016718\n",
      "Iteration 394, loss = 0.40792567\n",
      "Iteration 395, loss = 0.40773757\n",
      "Iteration 396, loss = 0.40662222\n",
      "Iteration 397, loss = 0.40991834\n",
      "Iteration 398, loss = 0.40482525\n",
      "Iteration 399, loss = 0.41253511\n",
      "Iteration 400, loss = 0.41690409\n",
      "Iteration 401, loss = 0.41674154\n",
      "Iteration 402, loss = 0.40781868\n",
      "Iteration 403, loss = 0.40443510\n",
      "Iteration 404, loss = 0.40414579\n",
      "Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
      "2.0467712208379716\n",
      "Iteration 1, loss = 16.46749610\n",
      "Iteration 2, loss = 3.37654333\n",
      "Iteration 3, loss = 2.32006749\n",
      "Iteration 4, loss = 1.95165380\n",
      "Iteration 5, loss = 1.77782076\n",
      "Iteration 6, loss = 1.68560228\n",
      "Iteration 7, loss = 1.62553043\n",
      "Iteration 8, loss = 1.59882843\n",
      "Iteration 9, loss = 1.56216838\n",
      "Iteration 10, loss = 1.53921709\n",
      "Iteration 11, loss = 1.51509438\n",
      "Iteration 12, loss = 1.49843929\n",
      "Iteration 13, loss = 1.47493532\n",
      "Iteration 14, loss = 1.45771205\n",
      "Iteration 15, loss = 1.44107949\n",
      "Iteration 16, loss = 1.42254569\n",
      "Iteration 17, loss = 1.39953636\n",
      "Iteration 18, loss = 1.38006531\n",
      "Iteration 19, loss = 1.36363166\n",
      "Iteration 20, loss = 1.34473185\n",
      "Iteration 21, loss = 1.32380658\n",
      "Iteration 22, loss = 1.30959792\n",
      "Iteration 23, loss = 1.28618897\n",
      "Iteration 24, loss = 1.26899944\n",
      "Iteration 25, loss = 1.24607106\n",
      "Iteration 26, loss = 1.23487088\n",
      "Iteration 27, loss = 1.21083416\n",
      "Iteration 28, loss = 1.18868011\n",
      "Iteration 29, loss = 1.16733525\n",
      "Iteration 30, loss = 1.15173480\n",
      "Iteration 31, loss = 1.13080830\n",
      "Iteration 32, loss = 1.11555447\n",
      "Iteration 33, loss = 1.09678159\n",
      "Iteration 34, loss = 1.08540144\n",
      "Iteration 35, loss = 1.06128891\n",
      "Iteration 36, loss = 1.05145313\n",
      "Iteration 37, loss = 1.04371379\n",
      "Iteration 38, loss = 1.03144509\n",
      "Iteration 39, loss = 1.00940108\n",
      "Iteration 40, loss = 0.99931810\n",
      "Iteration 41, loss = 0.98804026\n",
      "Iteration 42, loss = 0.97387390\n",
      "Iteration 43, loss = 0.96391347\n",
      "Iteration 44, loss = 0.95516499\n",
      "Iteration 45, loss = 0.94966223\n",
      "Iteration 46, loss = 0.94133068\n",
      "Iteration 47, loss = 0.93262359\n",
      "Iteration 48, loss = 0.92511498\n",
      "Iteration 49, loss = 0.91938803\n",
      "Iteration 50, loss = 0.91554882\n",
      "Iteration 51, loss = 0.90855055\n",
      "Iteration 52, loss = 0.90095808\n",
      "Iteration 53, loss = 0.89445691\n",
      "Iteration 54, loss = 0.88894132\n",
      "Iteration 55, loss = 0.88525676\n",
      "Iteration 56, loss = 0.87734870\n",
      "Iteration 57, loss = 0.87410507\n",
      "Iteration 58, loss = 0.87360793\n",
      "Iteration 59, loss = 0.86357352\n",
      "Iteration 60, loss = 0.85863870\n",
      "Iteration 61, loss = 0.85299831\n",
      "Iteration 62, loss = 0.84425865\n",
      "Iteration 63, loss = 0.84258718\n",
      "Iteration 64, loss = 0.84355917\n",
      "Iteration 65, loss = 0.83285708\n",
      "Iteration 66, loss = 0.82799371\n",
      "Iteration 67, loss = 0.82614610\n",
      "Iteration 68, loss = 0.82063613\n",
      "Iteration 69, loss = 0.82309886\n",
      "Iteration 70, loss = 0.81515413\n",
      "Iteration 71, loss = 0.81864636\n",
      "Iteration 72, loss = 0.80400598\n",
      "Iteration 73, loss = 0.80291787\n",
      "Iteration 74, loss = 0.79265554\n",
      "Iteration 75, loss = 0.79408245\n",
      "Iteration 76, loss = 0.78729053\n",
      "Iteration 77, loss = 0.79055399\n",
      "Iteration 78, loss = 0.78470263\n",
      "Iteration 79, loss = 0.77315257\n",
      "Iteration 80, loss = 0.77314535\n",
      "Iteration 81, loss = 0.76965146\n",
      "Iteration 82, loss = 0.76733520\n",
      "Iteration 83, loss = 0.76867339\n",
      "Iteration 84, loss = 0.76309670\n",
      "Iteration 85, loss = 0.76249018\n",
      "Iteration 86, loss = 0.75582847\n",
      "Iteration 87, loss = 0.74266434\n",
      "Iteration 88, loss = 0.74733483\n",
      "Iteration 89, loss = 0.75036991\n",
      "Iteration 90, loss = 0.74669696\n",
      "Iteration 91, loss = 0.74368763\n",
      "Iteration 92, loss = 0.73718153\n",
      "Iteration 93, loss = 0.73066843\n",
      "Iteration 94, loss = 0.74600980\n",
      "Iteration 95, loss = 0.72237108\n",
      "Iteration 96, loss = 0.72210236\n",
      "Iteration 97, loss = 0.72374521\n",
      "Iteration 98, loss = 0.72318229\n",
      "Iteration 99, loss = 0.72094268\n",
      "Iteration 100, loss = 0.71169017\n",
      "Iteration 101, loss = 0.70888669\n",
      "Iteration 102, loss = 0.70999262\n",
      "Iteration 103, loss = 0.70367725\n",
      "Iteration 104, loss = 0.70696260\n",
      "Iteration 105, loss = 0.70114179\n",
      "Iteration 106, loss = 0.69867776\n",
      "Iteration 107, loss = 0.69453358\n",
      "Iteration 108, loss = 0.68908410\n",
      "Iteration 109, loss = 0.68600671\n",
      "Iteration 110, loss = 0.68862070\n",
      "Iteration 111, loss = 0.69221655\n",
      "Iteration 112, loss = 0.68303202\n",
      "Iteration 113, loss = 0.67866613\n",
      "Iteration 114, loss = 0.67600380\n",
      "Iteration 115, loss = 0.68247700\n",
      "Iteration 116, loss = 0.67636521\n",
      "Iteration 117, loss = 0.67291057\n",
      "Iteration 118, loss = 0.67134316\n",
      "Iteration 119, loss = 0.66615128\n",
      "Iteration 120, loss = 0.66853841\n",
      "Iteration 121, loss = 0.66301234\n",
      "Iteration 122, loss = 0.65835453\n",
      "Iteration 123, loss = 0.66508891\n",
      "Iteration 124, loss = 0.65615022\n",
      "Iteration 125, loss = 0.65439923\n",
      "Iteration 126, loss = 0.64806322\n",
      "Iteration 127, loss = 0.65355663\n",
      "Iteration 128, loss = 0.65042672\n",
      "Iteration 129, loss = 0.65235628\n",
      "Iteration 130, loss = 0.64116069\n",
      "Iteration 131, loss = 0.64092595\n",
      "Iteration 132, loss = 0.63409888\n",
      "Iteration 133, loss = 0.64085274\n",
      "Iteration 134, loss = 0.63688909\n",
      "Iteration 135, loss = 0.63656497\n",
      "Iteration 136, loss = 0.63885458\n",
      "Iteration 137, loss = 0.63337923\n",
      "Iteration 138, loss = 0.62888776\n",
      "Iteration 139, loss = 0.62994524\n",
      "Iteration 140, loss = 0.62864664\n",
      "Iteration 141, loss = 0.62399746\n",
      "Iteration 142, loss = 0.62633095\n",
      "Iteration 143, loss = 0.61931334\n",
      "Iteration 144, loss = 0.61945133\n",
      "Iteration 145, loss = 0.61623591\n",
      "Iteration 146, loss = 0.62277772\n",
      "Iteration 147, loss = 0.62553062\n",
      "Iteration 148, loss = 0.61269352\n",
      "Iteration 149, loss = 0.60551852\n",
      "Iteration 150, loss = 0.60753704\n",
      "Iteration 151, loss = 0.60571050\n",
      "Iteration 152, loss = 0.60042302\n",
      "Iteration 153, loss = 0.60035050\n",
      "Iteration 154, loss = 0.60219323\n",
      "Iteration 155, loss = 0.60245687\n",
      "Iteration 156, loss = 0.60443537\n",
      "Iteration 157, loss = 0.59649936\n",
      "Iteration 158, loss = 0.59778594\n",
      "Iteration 159, loss = 0.59389705\n",
      "Iteration 160, loss = 0.59283281\n",
      "Iteration 161, loss = 0.58921006\n",
      "Iteration 162, loss = 0.58843961\n",
      "Iteration 163, loss = 0.58192579\n",
      "Iteration 164, loss = 0.58545644\n",
      "Iteration 165, loss = 0.59035198\n",
      "Iteration 166, loss = 0.58710739\n",
      "Iteration 167, loss = 0.57675217\n",
      "Iteration 168, loss = 0.57557429\n",
      "Iteration 169, loss = 0.58647240\n",
      "Iteration 170, loss = 0.57569644\n",
      "Iteration 171, loss = 0.57475764\n",
      "Iteration 172, loss = 0.57216639\n",
      "Iteration 173, loss = 0.57009141\n",
      "Iteration 174, loss = 0.57000922\n",
      "Iteration 175, loss = 0.56936709\n",
      "Iteration 176, loss = 0.56584560\n",
      "Iteration 177, loss = 0.56549935\n",
      "Iteration 178, loss = 0.56600606\n",
      "Iteration 179, loss = 0.56561267\n",
      "Iteration 180, loss = 0.56338161\n",
      "Iteration 181, loss = 0.55677376\n",
      "Iteration 182, loss = 0.56179277\n",
      "Iteration 183, loss = 0.55588805\n",
      "Iteration 184, loss = 0.55526529\n",
      "Iteration 185, loss = 0.55500462\n",
      "Iteration 186, loss = 0.55340635\n",
      "Iteration 187, loss = 0.55542205\n",
      "Iteration 188, loss = 0.55132566\n",
      "Iteration 189, loss = 0.55074448\n",
      "Iteration 190, loss = 0.55749439\n",
      "Iteration 191, loss = 0.55075523\n",
      "Iteration 192, loss = 0.54122992\n",
      "Iteration 193, loss = 0.54435289\n",
      "Iteration 194, loss = 0.54108003\n",
      "Iteration 195, loss = 0.54394484\n",
      "Iteration 196, loss = 0.53930821\n",
      "Iteration 197, loss = 0.54604246\n",
      "Iteration 198, loss = 0.55461860\n",
      "Iteration 199, loss = 0.53740126\n",
      "Iteration 200, loss = 0.53951437\n",
      "Iteration 201, loss = 0.53326519\n",
      "Iteration 202, loss = 0.53760981\n",
      "Iteration 203, loss = 0.55776258\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iteration 204, loss = 0.53279804\n",
      "Iteration 205, loss = 0.52884803\n",
      "Iteration 206, loss = 0.53187519\n",
      "Iteration 207, loss = 0.53024482\n",
      "Iteration 208, loss = 0.52977286\n",
      "Iteration 209, loss = 0.52516152\n",
      "Iteration 210, loss = 0.52289104\n",
      "Iteration 211, loss = 0.51830973\n",
      "Iteration 212, loss = 0.52501504\n",
      "Iteration 213, loss = 0.51634342\n",
      "Iteration 214, loss = 0.52075009\n",
      "Iteration 215, loss = 0.52005447\n",
      "Iteration 216, loss = 0.52027856\n",
      "Iteration 217, loss = 0.52794834\n",
      "Iteration 218, loss = 0.52738286\n",
      "Iteration 219, loss = 0.51310207\n",
      "Iteration 220, loss = 0.51415753\n",
      "Iteration 221, loss = 0.50886894\n",
      "Iteration 222, loss = 0.50654971\n",
      "Iteration 223, loss = 0.51282213\n",
      "Iteration 224, loss = 0.52000766\n",
      "Iteration 225, loss = 0.52420784\n",
      "Iteration 226, loss = 0.52201889\n",
      "Iteration 227, loss = 0.50787524\n",
      "Iteration 228, loss = 0.50121178\n",
      "Iteration 229, loss = 0.50011180\n",
      "Iteration 230, loss = 0.50538262\n",
      "Iteration 231, loss = 0.50648814\n",
      "Iteration 232, loss = 0.49548764\n",
      "Iteration 233, loss = 0.50912090\n",
      "Iteration 234, loss = 0.50566108\n",
      "Iteration 235, loss = 0.49865073\n",
      "Iteration 236, loss = 0.50172314\n",
      "Iteration 237, loss = 0.49569878\n",
      "Iteration 238, loss = 0.49538181\n",
      "Iteration 239, loss = 0.50100623\n",
      "Iteration 240, loss = 0.49204009\n",
      "Iteration 241, loss = 0.49379255\n",
      "Iteration 242, loss = 0.49681026\n",
      "Iteration 243, loss = 0.49314729\n",
      "Iteration 244, loss = 0.49516610\n",
      "Iteration 245, loss = 0.48936333\n",
      "Iteration 246, loss = 0.49348226\n",
      "Iteration 247, loss = 0.48800829\n",
      "Iteration 248, loss = 0.48049134\n",
      "Iteration 249, loss = 0.48435800\n",
      "Iteration 250, loss = 0.48653247\n",
      "Iteration 251, loss = 0.48420866\n",
      "Iteration 252, loss = 0.49388179\n",
      "Iteration 253, loss = 0.48259715\n",
      "Iteration 254, loss = 0.48101403\n",
      "Iteration 255, loss = 0.48216399\n",
      "Iteration 256, loss = 0.47726060\n",
      "Iteration 257, loss = 0.47602341\n",
      "Iteration 258, loss = 0.47542353\n",
      "Iteration 259, loss = 0.48790707\n",
      "Iteration 260, loss = 0.47597461\n",
      "Iteration 261, loss = 0.47146745\n",
      "Iteration 262, loss = 0.47170031\n",
      "Iteration 263, loss = 0.47151832\n",
      "Iteration 264, loss = 0.46940365\n",
      "Iteration 265, loss = 0.48017782\n",
      "Iteration 266, loss = 0.47915558\n",
      "Iteration 267, loss = 0.46859159\n",
      "Iteration 268, loss = 0.47779341\n",
      "Iteration 269, loss = 0.46929977\n",
      "Iteration 270, loss = 0.46862416\n",
      "Iteration 271, loss = 0.47391556\n",
      "Iteration 272, loss = 0.46636389\n",
      "Iteration 273, loss = 0.48258515\n",
      "Iteration 274, loss = 0.46378693\n",
      "Iteration 275, loss = 0.46292756\n",
      "Iteration 276, loss = 0.46301699\n",
      "Iteration 277, loss = 0.46121641\n",
      "Iteration 278, loss = 0.46552664\n",
      "Iteration 279, loss = 0.45697946\n",
      "Iteration 280, loss = 0.45659524\n",
      "Iteration 281, loss = 0.45315585\n",
      "Iteration 282, loss = 0.46053427\n",
      "Iteration 283, loss = 0.45657531\n",
      "Iteration 284, loss = 0.45989133\n",
      "Iteration 285, loss = 0.46383558\n",
      "Iteration 286, loss = 0.45692733\n",
      "Iteration 287, loss = 0.45735607\n",
      "Iteration 288, loss = 0.44596262\n",
      "Iteration 289, loss = 0.45319987\n",
      "Iteration 290, loss = 0.45023244\n",
      "Iteration 291, loss = 0.45156648\n",
      "Iteration 292, loss = 0.45891088\n",
      "Iteration 293, loss = 0.44853080\n",
      "Iteration 294, loss = 0.45216947\n",
      "Iteration 295, loss = 0.45100319\n",
      "Iteration 296, loss = 0.45204735\n",
      "Iteration 297, loss = 0.45083720\n",
      "Iteration 298, loss = 0.45166092\n",
      "Iteration 299, loss = 0.45085279\n",
      "Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
      "1.9814312257714835\n",
      "Iteration 1, loss = 16.44279188\n",
      "Iteration 2, loss = 3.38136461\n",
      "Iteration 3, loss = 2.32807658\n",
      "Iteration 4, loss = 1.95622540\n",
      "Iteration 5, loss = 1.77758672\n",
      "Iteration 6, loss = 1.68415352\n",
      "Iteration 7, loss = 1.62487063\n",
      "Iteration 8, loss = 1.59316183\n",
      "Iteration 9, loss = 1.56044555\n",
      "Iteration 10, loss = 1.53387259\n",
      "Iteration 11, loss = 1.51337461\n",
      "Iteration 12, loss = 1.49203389\n",
      "Iteration 13, loss = 1.47223380\n",
      "Iteration 14, loss = 1.45215262\n",
      "Iteration 15, loss = 1.43671077\n",
      "Iteration 16, loss = 1.41777174\n",
      "Iteration 17, loss = 1.39516224\n",
      "Iteration 18, loss = 1.37685688\n",
      "Iteration 19, loss = 1.35593155\n",
      "Iteration 20, loss = 1.34068772\n",
      "Iteration 21, loss = 1.31963031\n",
      "Iteration 22, loss = 1.30224569\n",
      "Iteration 23, loss = 1.28051380\n",
      "Iteration 24, loss = 1.26189456\n",
      "Iteration 25, loss = 1.23813327\n",
      "Iteration 26, loss = 1.22441994\n",
      "Iteration 27, loss = 1.19939010\n",
      "Iteration 28, loss = 1.17962537\n",
      "Iteration 29, loss = 1.15763212\n",
      "Iteration 30, loss = 1.14168159\n",
      "Iteration 31, loss = 1.12166185\n",
      "Iteration 32, loss = 1.10053064\n",
      "Iteration 33, loss = 1.08426553\n",
      "Iteration 34, loss = 1.06945497\n",
      "Iteration 35, loss = 1.04655112\n",
      "Iteration 36, loss = 1.03787562\n",
      "Iteration 37, loss = 1.02294442\n",
      "Iteration 38, loss = 1.01358329\n",
      "Iteration 39, loss = 0.99083205\n",
      "Iteration 40, loss = 0.98456193\n",
      "Iteration 41, loss = 0.97112168\n",
      "Iteration 42, loss = 0.95967983\n",
      "Iteration 43, loss = 0.95242795\n",
      "Iteration 44, loss = 0.94130840\n",
      "Iteration 45, loss = 0.93386422\n",
      "Iteration 46, loss = 0.92433979\n",
      "Iteration 47, loss = 0.91668693\n",
      "Iteration 48, loss = 0.90836582\n",
      "Iteration 49, loss = 0.90350177\n",
      "Iteration 50, loss = 0.89678861\n",
      "Iteration 51, loss = 0.89623486\n",
      "Iteration 52, loss = 0.88499093\n",
      "Iteration 53, loss = 0.87423618\n",
      "Iteration 54, loss = 0.87334486\n",
      "Iteration 55, loss = 0.87084458\n",
      "Iteration 56, loss = 0.86197741\n",
      "Iteration 57, loss = 0.85681588\n",
      "Iteration 58, loss = 0.85694279\n",
      "Iteration 59, loss = 0.85049236\n",
      "Iteration 60, loss = 0.84945814\n",
      "Iteration 61, loss = 0.83626533\n",
      "Iteration 62, loss = 0.83103758\n",
      "Iteration 63, loss = 0.82724572\n",
      "Iteration 64, loss = 0.82342706\n",
      "Iteration 65, loss = 0.81636120\n",
      "Iteration 66, loss = 0.81115195\n",
      "Iteration 67, loss = 0.81085410\n",
      "Iteration 68, loss = 0.80630415\n",
      "Iteration 69, loss = 0.80387082\n",
      "Iteration 70, loss = 0.79822749\n",
      "Iteration 71, loss = 0.79648818\n",
      "Iteration 72, loss = 0.78930711\n",
      "Iteration 73, loss = 0.78753019\n",
      "Iteration 74, loss = 0.77905957\n",
      "Iteration 75, loss = 0.78099348\n",
      "Iteration 76, loss = 0.77521558\n",
      "Iteration 77, loss = 0.77362959\n",
      "Iteration 78, loss = 0.77017334\n",
      "Iteration 79, loss = 0.76384545\n",
      "Iteration 80, loss = 0.76135354\n",
      "Iteration 81, loss = 0.76299836\n",
      "Iteration 82, loss = 0.75662821\n",
      "Iteration 83, loss = 0.75477763\n",
      "Iteration 84, loss = 0.75339795\n",
      "Iteration 85, loss = 0.75398645\n",
      "Iteration 86, loss = 0.74126762\n",
      "Iteration 87, loss = 0.73355133\n",
      "Iteration 88, loss = 0.74124553\n",
      "Iteration 89, loss = 0.73543729\n",
      "Iteration 90, loss = 0.73322981\n",
      "Iteration 91, loss = 0.73338312\n",
      "Iteration 92, loss = 0.72909585\n",
      "Iteration 93, loss = 0.71972109\n",
      "Iteration 94, loss = 0.72755846\n",
      "Iteration 95, loss = 0.71724995\n",
      "Iteration 96, loss = 0.71313754\n",
      "Iteration 97, loss = 0.72066761\n",
      "Iteration 98, loss = 0.71085906\n",
      "Iteration 99, loss = 0.71066610\n",
      "Iteration 100, loss = 0.70156140\n",
      "Iteration 101, loss = 0.70106358\n",
      "Iteration 102, loss = 0.70173566\n",
      "Iteration 103, loss = 0.69419987\n",
      "Iteration 104, loss = 0.69976088\n",
      "Iteration 105, loss = 0.69425189\n",
      "Iteration 106, loss = 0.69290974\n",
      "Iteration 107, loss = 0.69293020\n",
      "Iteration 108, loss = 0.68623536\n",
      "Iteration 109, loss = 0.67800632\n",
      "Iteration 110, loss = 0.68521201\n",
      "Iteration 111, loss = 0.68058717\n",
      "Iteration 112, loss = 0.67722160\n",
      "Iteration 113, loss = 0.67215908\n",
      "Iteration 114, loss = 0.67389277\n",
      "Iteration 115, loss = 0.66953120\n",
      "Iteration 116, loss = 0.67259442\n",
      "Iteration 117, loss = 0.66439077\n",
      "Iteration 118, loss = 0.66446541\n",
      "Iteration 119, loss = 0.65914739\n",
      "Iteration 120, loss = 0.65887108\n",
      "Iteration 121, loss = 0.65754517\n",
      "Iteration 122, loss = 0.65338871\n",
      "Iteration 123, loss = 0.65488296\n",
      "Iteration 124, loss = 0.65102356\n",
      "Iteration 125, loss = 0.64855587\n",
      "Iteration 126, loss = 0.64321477\n",
      "Iteration 127, loss = 0.64646845\n",
      "Iteration 128, loss = 0.64376027\n",
      "Iteration 129, loss = 0.64318656\n",
      "Iteration 130, loss = 0.63775764\n",
      "Iteration 131, loss = 0.63926669\n",
      "Iteration 132, loss = 0.63432026\n",
      "Iteration 133, loss = 0.63644590\n",
      "Iteration 134, loss = 0.63795918\n",
      "Iteration 135, loss = 0.63087477\n",
      "Iteration 136, loss = 0.63505482\n",
      "Iteration 137, loss = 0.62861816\n",
      "Iteration 138, loss = 0.62706771\n",
      "Iteration 139, loss = 0.63068272\n",
      "Iteration 140, loss = 0.62158269\n",
      "Iteration 141, loss = 0.62076147\n",
      "Iteration 142, loss = 0.62104227\n",
      "Iteration 143, loss = 0.61703651\n",
      "Iteration 144, loss = 0.61907367\n",
      "Iteration 145, loss = 0.61253659\n",
      "Iteration 146, loss = 0.61289022\n",
      "Iteration 147, loss = 0.61412675\n",
      "Iteration 148, loss = 0.61113828\n",
      "Iteration 149, loss = 0.60745733\n",
      "Iteration 150, loss = 0.60402102\n",
      "Iteration 151, loss = 0.60526729\n",
      "Iteration 152, loss = 0.60460321\n",
      "Iteration 153, loss = 0.60202237\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iteration 154, loss = 0.59987463\n",
      "Iteration 155, loss = 0.60125228\n",
      "Iteration 156, loss = 0.59827373\n",
      "Iteration 157, loss = 0.59375850\n",
      "Iteration 158, loss = 0.59700253\n",
      "Iteration 159, loss = 0.59342044\n",
      "Iteration 160, loss = 0.59588039\n",
      "Iteration 161, loss = 0.59044986\n",
      "Iteration 162, loss = 0.58967897\n",
      "Iteration 163, loss = 0.58409718\n",
      "Iteration 164, loss = 0.58375715\n",
      "Iteration 165, loss = 0.58730016\n",
      "Iteration 166, loss = 0.58162527\n",
      "Iteration 167, loss = 0.57941692\n",
      "Iteration 168, loss = 0.57275677\n",
      "Iteration 169, loss = 0.58010264\n",
      "Iteration 170, loss = 0.57526214\n",
      "Iteration 171, loss = 0.58051054\n",
      "Iteration 172, loss = 0.57735895\n",
      "Iteration 173, loss = 0.57063122\n",
      "Iteration 174, loss = 0.57219998\n",
      "Iteration 175, loss = 0.57284290\n",
      "Iteration 176, loss = 0.56521915\n",
      "Iteration 177, loss = 0.56938206\n",
      "Iteration 178, loss = 0.56880967\n",
      "Iteration 179, loss = 0.56643702\n",
      "Iteration 180, loss = 0.56071146\n",
      "Iteration 181, loss = 0.55909916\n",
      "Iteration 182, loss = 0.55819394\n",
      "Iteration 183, loss = 0.55861666\n",
      "Iteration 184, loss = 0.55851707\n",
      "Iteration 185, loss = 0.55539023\n",
      "Iteration 186, loss = 0.55604034\n",
      "Iteration 187, loss = 0.55263879\n",
      "Iteration 188, loss = 0.55122972\n",
      "Iteration 189, loss = 0.54913652\n",
      "Iteration 190, loss = 0.54973905\n",
      "Iteration 191, loss = 0.54882584\n",
      "Iteration 192, loss = 0.54557112\n",
      "Iteration 193, loss = 0.54785592\n",
      "Iteration 194, loss = 0.54419290\n",
      "Iteration 195, loss = 0.54566856\n",
      "Iteration 196, loss = 0.54061834\n",
      "Iteration 197, loss = 0.54331523\n",
      "Iteration 198, loss = 0.54261746\n",
      "Iteration 199, loss = 0.53781900\n",
      "Iteration 200, loss = 0.53824862\n",
      "Iteration 201, loss = 0.53964486\n",
      "Iteration 202, loss = 0.53959643\n",
      "Iteration 203, loss = 0.54438027\n",
      "Iteration 204, loss = 0.53225913\n",
      "Iteration 205, loss = 0.53199344\n",
      "Iteration 206, loss = 0.53481217\n",
      "Iteration 207, loss = 0.53106903\n",
      "Iteration 208, loss = 0.53015934\n",
      "Iteration 209, loss = 0.52972583\n",
      "Iteration 210, loss = 0.52592662\n",
      "Iteration 211, loss = 0.52230405\n",
      "Iteration 212, loss = 0.52730156\n",
      "Iteration 213, loss = 0.51801270\n",
      "Iteration 214, loss = 0.52293793\n",
      "Iteration 215, loss = 0.52019653\n",
      "Iteration 216, loss = 0.52747344\n",
      "Iteration 217, loss = 0.51722866\n",
      "Iteration 218, loss = 0.52241110\n",
      "Iteration 219, loss = 0.51773490\n",
      "Iteration 220, loss = 0.51715024\n",
      "Iteration 221, loss = 0.51523175\n",
      "Iteration 222, loss = 0.50775208\n",
      "Iteration 223, loss = 0.51342132\n",
      "Iteration 224, loss = 0.51065332\n",
      "Iteration 225, loss = 0.51896490\n",
      "Iteration 226, loss = 0.51214143\n",
      "Iteration 227, loss = 0.51453636\n",
      "Iteration 228, loss = 0.50224217\n",
      "Iteration 229, loss = 0.50287577\n",
      "Iteration 230, loss = 0.50651188\n",
      "Iteration 231, loss = 0.50889327\n",
      "Iteration 232, loss = 0.49998585\n",
      "Iteration 233, loss = 0.49936968\n",
      "Iteration 234, loss = 0.50606211\n",
      "Iteration 235, loss = 0.49845658\n",
      "Iteration 236, loss = 0.50432087\n",
      "Iteration 237, loss = 0.49820268\n",
      "Iteration 238, loss = 0.49836986\n",
      "Iteration 239, loss = 0.49628269\n",
      "Iteration 240, loss = 0.49057770\n",
      "Iteration 241, loss = 0.49591993\n",
      "Iteration 242, loss = 0.49696256\n",
      "Iteration 243, loss = 0.49238673\n",
      "Iteration 244, loss = 0.49316703\n",
      "Iteration 245, loss = 0.48895725\n",
      "Iteration 246, loss = 0.49535413\n",
      "Iteration 247, loss = 0.48350348\n",
      "Iteration 248, loss = 0.48721703\n",
      "Iteration 249, loss = 0.48707040\n",
      "Iteration 250, loss = 0.48591318\n",
      "Iteration 251, loss = 0.48494386\n",
      "Iteration 252, loss = 0.48529936\n",
      "Iteration 253, loss = 0.48849623\n",
      "Iteration 254, loss = 0.48318006\n",
      "Iteration 255, loss = 0.48475507\n",
      "Iteration 256, loss = 0.47875663\n",
      "Iteration 257, loss = 0.47936595\n",
      "Iteration 258, loss = 0.47642309\n",
      "Iteration 259, loss = 0.48410125\n",
      "Iteration 260, loss = 0.47981128\n",
      "Iteration 261, loss = 0.47673450\n",
      "Iteration 262, loss = 0.47421598\n",
      "Iteration 263, loss = 0.48495892\n",
      "Iteration 264, loss = 0.47134730\n",
      "Iteration 265, loss = 0.47152803\n",
      "Iteration 266, loss = 0.47152291\n",
      "Iteration 267, loss = 0.47672567\n",
      "Iteration 268, loss = 0.47419261\n",
      "Iteration 269, loss = 0.47528339\n",
      "Iteration 270, loss = 0.47260867\n",
      "Iteration 271, loss = 0.46795186\n",
      "Iteration 272, loss = 0.46652500\n",
      "Iteration 273, loss = 0.47581676\n",
      "Iteration 274, loss = 0.46855268\n",
      "Iteration 275, loss = 0.46165748\n",
      "Iteration 276, loss = 0.46613941\n",
      "Iteration 277, loss = 0.46723610\n",
      "Iteration 278, loss = 0.46392459\n",
      "Iteration 279, loss = 0.46464315\n",
      "Iteration 280, loss = 0.46228977\n",
      "Iteration 281, loss = 0.46117379\n",
      "Iteration 282, loss = 0.46170259\n",
      "Iteration 283, loss = 0.46136289\n",
      "Iteration 284, loss = 0.46080964\n",
      "Iteration 285, loss = 0.46545778\n",
      "Iteration 286, loss = 0.46284980\n",
      "Iteration 287, loss = 0.45899817\n",
      "Iteration 288, loss = 0.44937089\n",
      "Iteration 289, loss = 0.45326267\n",
      "Iteration 290, loss = 0.45765186\n",
      "Iteration 291, loss = 0.46087960\n",
      "Iteration 292, loss = 0.45518161\n",
      "Iteration 293, loss = 0.45031534\n",
      "Iteration 294, loss = 0.45503680\n",
      "Iteration 295, loss = 0.45295645\n",
      "Iteration 296, loss = 0.45164428\n",
      "Iteration 297, loss = 0.45283768\n",
      "Iteration 298, loss = 0.45067821\n",
      "Iteration 299, loss = 0.45139395\n",
      "Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
      "2.3027722428412356\n",
      "Iteration 1, loss = 16.53953459\n",
      "Iteration 2, loss = 3.42836581\n",
      "Iteration 3, loss = 2.36635993\n",
      "Iteration 4, loss = 1.98808808\n",
      "Iteration 5, loss = 1.81149942\n",
      "Iteration 6, loss = 1.71539842\n",
      "Iteration 7, loss = 1.65786226\n",
      "Iteration 8, loss = 1.62282172\n",
      "Iteration 9, loss = 1.59089039\n",
      "Iteration 10, loss = 1.56150261\n",
      "Iteration 11, loss = 1.54061840\n",
      "Iteration 12, loss = 1.51866030\n",
      "Iteration 13, loss = 1.49547321\n",
      "Iteration 14, loss = 1.47600359\n",
      "Iteration 15, loss = 1.46074379\n",
      "Iteration 16, loss = 1.44052942\n",
      "Iteration 17, loss = 1.42101146\n",
      "Iteration 18, loss = 1.40162100\n",
      "Iteration 19, loss = 1.37846426\n",
      "Iteration 20, loss = 1.36133082\n",
      "Iteration 21, loss = 1.33872103\n",
      "Iteration 22, loss = 1.31885596\n",
      "Iteration 23, loss = 1.29883497\n",
      "Iteration 24, loss = 1.28275273\n",
      "Iteration 25, loss = 1.25470210\n",
      "Iteration 26, loss = 1.24424355\n",
      "Iteration 27, loss = 1.21742989\n",
      "Iteration 28, loss = 1.19550589\n",
      "Iteration 29, loss = 1.17250718\n",
      "Iteration 30, loss = 1.15841339\n",
      "Iteration 31, loss = 1.13877233\n",
      "Iteration 32, loss = 1.11749222\n",
      "Iteration 33, loss = 1.10365381\n",
      "Iteration 34, loss = 1.08982892\n",
      "Iteration 35, loss = 1.06862657\n",
      "Iteration 36, loss = 1.05867005\n",
      "Iteration 37, loss = 1.04445513\n",
      "Iteration 38, loss = 1.03401006\n",
      "Iteration 39, loss = 1.01398988\n",
      "Iteration 40, loss = 1.00747766\n",
      "Iteration 41, loss = 0.99470575\n",
      "Iteration 42, loss = 0.97904838\n",
      "Iteration 43, loss = 0.97408079\n",
      "Iteration 44, loss = 0.96500597\n",
      "Iteration 45, loss = 0.95763144\n",
      "Iteration 46, loss = 0.95851460\n",
      "Iteration 47, loss = 0.94226501\n",
      "Iteration 48, loss = 0.93150854\n",
      "Iteration 49, loss = 0.92699635\n",
      "Iteration 50, loss = 0.91889123\n",
      "Iteration 51, loss = 0.91574403\n",
      "Iteration 52, loss = 0.90842562\n",
      "Iteration 53, loss = 0.89901746\n",
      "Iteration 54, loss = 0.89852222\n",
      "Iteration 55, loss = 0.89138198\n",
      "Iteration 56, loss = 0.88702837\n",
      "Iteration 57, loss = 0.87652995\n",
      "Iteration 58, loss = 0.88564525\n",
      "Iteration 59, loss = 0.86872061\n",
      "Iteration 60, loss = 0.86936052\n",
      "Iteration 61, loss = 0.85651382\n",
      "Iteration 62, loss = 0.84868877\n",
      "Iteration 63, loss = 0.84695349\n",
      "Iteration 64, loss = 0.84206969\n",
      "Iteration 65, loss = 0.83440394\n",
      "Iteration 66, loss = 0.83493298\n",
      "Iteration 67, loss = 0.83143464\n",
      "Iteration 68, loss = 0.83520238\n",
      "Iteration 69, loss = 0.82733178\n",
      "Iteration 70, loss = 0.81941510\n",
      "Iteration 71, loss = 0.81376676\n",
      "Iteration 72, loss = 0.80582502\n",
      "Iteration 73, loss = 0.80763532\n",
      "Iteration 74, loss = 0.80161419\n",
      "Iteration 75, loss = 0.79816865\n",
      "Iteration 76, loss = 0.79504304\n",
      "Iteration 77, loss = 0.79203727\n",
      "Iteration 78, loss = 0.78974027\n",
      "Iteration 79, loss = 0.77946684\n",
      "Iteration 80, loss = 0.78071409\n",
      "Iteration 81, loss = 0.77671121\n",
      "Iteration 82, loss = 0.77505639\n",
      "Iteration 83, loss = 0.77758773\n",
      "Iteration 84, loss = 0.78003516\n",
      "Iteration 85, loss = 0.76886028\n",
      "Iteration 86, loss = 0.76652299\n",
      "Iteration 87, loss = 0.75127863\n",
      "Iteration 88, loss = 0.75790526\n",
      "Iteration 89, loss = 0.75611317\n",
      "Iteration 90, loss = 0.75577357\n",
      "Iteration 91, loss = 0.75397929\n",
      "Iteration 92, loss = 0.74336778\n",
      "Iteration 93, loss = 0.73633904\n",
      "Iteration 94, loss = 0.73354155\n",
      "Iteration 95, loss = 0.72861335\n",
      "Iteration 96, loss = 0.72562991\n",
      "Iteration 97, loss = 0.72739208\n",
      "Iteration 98, loss = 0.72683435\n",
      "Iteration 99, loss = 0.72841189\n",
      "Iteration 100, loss = 0.72051322\n",
      "Iteration 101, loss = 0.71452701\n",
      "Iteration 102, loss = 0.71967699\n",
      "Iteration 103, loss = 0.71765680\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iteration 104, loss = 0.71621889\n",
      "Iteration 105, loss = 0.70708320\n",
      "Iteration 106, loss = 0.71540369\n",
      "Iteration 107, loss = 0.70452938\n",
      "Iteration 108, loss = 0.69935563\n",
      "Iteration 109, loss = 0.69941136\n",
      "Iteration 110, loss = 0.70042758\n",
      "Iteration 111, loss = 0.69752034\n",
      "Iteration 112, loss = 0.69115130\n",
      "Iteration 113, loss = 0.68881496\n",
      "Iteration 114, loss = 0.68644309\n",
      "Iteration 115, loss = 0.68788259\n",
      "Iteration 116, loss = 0.68762693\n",
      "Iteration 117, loss = 0.68383098\n",
      "Iteration 118, loss = 0.67650307\n",
      "Iteration 119, loss = 0.67480864\n",
      "Iteration 120, loss = 0.66912164\n",
      "Iteration 121, loss = 0.67626211\n",
      "Iteration 122, loss = 0.67158875\n",
      "Iteration 123, loss = 0.67451772\n",
      "Iteration 124, loss = 0.66158339\n",
      "Iteration 125, loss = 0.66332774\n",
      "Iteration 126, loss = 0.65896069\n",
      "Iteration 127, loss = 0.66066367\n",
      "Iteration 128, loss = 0.65928905\n",
      "Iteration 129, loss = 0.66204242\n",
      "Iteration 130, loss = 0.65194012\n",
      "Iteration 131, loss = 0.66455608\n",
      "Iteration 132, loss = 0.64518236\n",
      "Iteration 133, loss = 0.65216639\n",
      "Iteration 134, loss = 0.65154815\n",
      "Iteration 135, loss = 0.64912385\n",
      "Iteration 136, loss = 0.65711789\n",
      "Iteration 137, loss = 0.64303117\n",
      "Iteration 138, loss = 0.63903234\n",
      "Iteration 139, loss = 0.65266137\n",
      "Iteration 140, loss = 0.64357913\n",
      "Iteration 141, loss = 0.64046510\n",
      "Iteration 142, loss = 0.63717149\n",
      "Iteration 143, loss = 0.62878315\n",
      "Iteration 144, loss = 0.63234662\n",
      "Iteration 145, loss = 0.62471999\n",
      "Iteration 146, loss = 0.62789237\n",
      "Iteration 147, loss = 0.65027759\n",
      "Iteration 148, loss = 0.62254016\n",
      "Iteration 149, loss = 0.62621227\n",
      "Iteration 150, loss = 0.61950184\n",
      "Iteration 151, loss = 0.62169124\n",
      "Iteration 152, loss = 0.62060410\n",
      "Iteration 153, loss = 0.61765984\n",
      "Iteration 154, loss = 0.61889459\n",
      "Iteration 155, loss = 0.60984808\n",
      "Iteration 156, loss = 0.61273002\n",
      "Iteration 157, loss = 0.60532940\n",
      "Iteration 158, loss = 0.60585477\n",
      "Iteration 159, loss = 0.60562966\n",
      "Iteration 160, loss = 0.60758437\n",
      "Iteration 161, loss = 0.59977175\n",
      "Iteration 162, loss = 0.60078640\n",
      "Iteration 163, loss = 0.59936241\n",
      "Iteration 164, loss = 0.60917053\n",
      "Iteration 165, loss = 0.59757367\n",
      "Iteration 166, loss = 0.59136547\n",
      "Iteration 167, loss = 0.59086795\n",
      "Iteration 168, loss = 0.58538646\n",
      "Iteration 169, loss = 0.59182847\n",
      "Iteration 170, loss = 0.58461738\n",
      "Iteration 171, loss = 0.59190010\n",
      "Iteration 172, loss = 0.58937332\n",
      "Iteration 173, loss = 0.58364497\n",
      "Iteration 174, loss = 0.58085502\n",
      "Iteration 175, loss = 0.58179976\n",
      "Iteration 176, loss = 0.57599779\n",
      "Iteration 177, loss = 0.57948696\n",
      "Iteration 178, loss = 0.57939540\n",
      "Iteration 179, loss = 0.57250716\n",
      "Iteration 180, loss = 0.57376338\n",
      "Iteration 181, loss = 0.57372201\n",
      "Iteration 182, loss = 0.57454671\n",
      "Iteration 183, loss = 0.57022759\n",
      "Iteration 184, loss = 0.57568552\n",
      "Iteration 185, loss = 0.56801933\n",
      "Iteration 186, loss = 0.56689422\n",
      "Iteration 187, loss = 0.56833924\n",
      "Iteration 188, loss = 0.56030644\n",
      "Iteration 189, loss = 0.56915636\n",
      "Iteration 190, loss = 0.56279296\n",
      "Iteration 191, loss = 0.55927572\n",
      "Iteration 192, loss = 0.55945232\n",
      "Iteration 193, loss = 0.55882858\n",
      "Iteration 194, loss = 0.55198177\n",
      "Iteration 195, loss = 0.55330163\n",
      "Iteration 196, loss = 0.55443961\n",
      "Iteration 197, loss = 0.55096227\n",
      "Iteration 198, loss = 0.55871537\n",
      "Iteration 199, loss = 0.55084018\n",
      "Iteration 200, loss = 0.55180165\n",
      "Iteration 201, loss = 0.55708396\n",
      "Iteration 202, loss = 0.54829516\n",
      "Iteration 203, loss = 0.54903476\n",
      "Iteration 204, loss = 0.54553317\n",
      "Iteration 205, loss = 0.54627142\n",
      "Iteration 206, loss = 0.54754613\n",
      "Iteration 207, loss = 0.53950450\n",
      "Iteration 208, loss = 0.54032301\n",
      "Iteration 209, loss = 0.54252790\n",
      "Iteration 210, loss = 0.53689988\n",
      "Iteration 211, loss = 0.53550039\n",
      "Iteration 212, loss = 0.54069224\n",
      "Iteration 213, loss = 0.53074145\n",
      "Iteration 214, loss = 0.52985717\n",
      "Iteration 215, loss = 0.53006771\n",
      "Iteration 216, loss = 0.53690600\n",
      "Iteration 217, loss = 0.53400779\n",
      "Iteration 218, loss = 0.53808643\n",
      "Iteration 219, loss = 0.52633020\n",
      "Iteration 220, loss = 0.52995527\n",
      "Iteration 221, loss = 0.52722395\n",
      "Iteration 222, loss = 0.52304091\n",
      "Iteration 223, loss = 0.52348321\n",
      "Iteration 224, loss = 0.54036978\n",
      "Iteration 225, loss = 0.54093863\n",
      "Iteration 226, loss = 0.52022884\n",
      "Iteration 227, loss = 0.54333907\n",
      "Iteration 228, loss = 0.51504946\n",
      "Iteration 229, loss = 0.51744157\n",
      "Iteration 230, loss = 0.52334553\n",
      "Iteration 231, loss = 0.51790747\n",
      "Iteration 232, loss = 0.51301025\n",
      "Iteration 233, loss = 0.52077675\n",
      "Iteration 234, loss = 0.51567950\n",
      "Iteration 235, loss = 0.51596061\n",
      "Iteration 236, loss = 0.51861663\n",
      "Iteration 237, loss = 0.50689643\n",
      "Iteration 238, loss = 0.51892666\n",
      "Iteration 239, loss = 0.50831413\n",
      "Iteration 240, loss = 0.51138030\n",
      "Iteration 241, loss = 0.51319185\n",
      "Iteration 242, loss = 0.50901164\n",
      "Iteration 243, loss = 0.50226861\n",
      "Iteration 244, loss = 0.51026186\n",
      "Iteration 245, loss = 0.50743629\n",
      "Iteration 246, loss = 0.50465165\n",
      "Iteration 247, loss = 0.49606834\n",
      "Iteration 248, loss = 0.49729188\n",
      "Iteration 249, loss = 0.49698820\n",
      "Iteration 250, loss = 0.50407331\n",
      "Iteration 251, loss = 0.49685370\n",
      "Iteration 252, loss = 0.51067404\n",
      "Iteration 253, loss = 0.49826843\n",
      "Iteration 254, loss = 0.49321478\n",
      "Iteration 255, loss = 0.49640794\n",
      "Iteration 256, loss = 0.49529955\n",
      "Iteration 257, loss = 0.49387289\n",
      "Iteration 258, loss = 0.48501643\n",
      "Iteration 259, loss = 0.49853283\n",
      "Iteration 260, loss = 0.49043442\n",
      "Iteration 261, loss = 0.48773858\n",
      "Iteration 262, loss = 0.48783858\n",
      "Iteration 263, loss = 0.49340977\n",
      "Iteration 264, loss = 0.48380684\n",
      "Iteration 265, loss = 0.48884640\n",
      "Iteration 266, loss = 0.48412021\n",
      "Iteration 267, loss = 0.49250363\n",
      "Iteration 268, loss = 0.48801874\n",
      "Iteration 269, loss = 0.48313761\n",
      "Iteration 270, loss = 0.48121902\n",
      "Iteration 271, loss = 0.48154721\n",
      "Iteration 272, loss = 0.47759456\n",
      "Iteration 273, loss = 0.47965521\n",
      "Iteration 274, loss = 0.48067975\n",
      "Iteration 275, loss = 0.47001825\n",
      "Iteration 276, loss = 0.47809248\n",
      "Iteration 277, loss = 0.47861708\n",
      "Iteration 278, loss = 0.46910896\n",
      "Iteration 279, loss = 0.48003498\n",
      "Iteration 280, loss = 0.47460592\n",
      "Iteration 281, loss = 0.47179457\n",
      "Iteration 282, loss = 0.47433437\n",
      "Iteration 283, loss = 0.47208528\n",
      "Iteration 284, loss = 0.46933577\n",
      "Iteration 285, loss = 0.47530673\n",
      "Iteration 286, loss = 0.47611184\n",
      "Iteration 287, loss = 0.46674521\n",
      "Iteration 288, loss = 0.46321641\n",
      "Iteration 289, loss = 0.46613237\n",
      "Iteration 290, loss = 0.46558627\n",
      "Iteration 291, loss = 0.47485192\n",
      "Iteration 292, loss = 0.46750075\n",
      "Iteration 293, loss = 0.46353847\n",
      "Iteration 294, loss = 0.46258141\n",
      "Iteration 295, loss = 0.46424870\n",
      "Iteration 296, loss = 0.45960896\n",
      "Iteration 297, loss = 0.46363047\n",
      "Iteration 298, loss = 0.46586582\n",
      "Iteration 299, loss = 0.46237504\n",
      "Iteration 300, loss = 0.46262342\n",
      "Iteration 301, loss = 0.45875995\n",
      "Iteration 302, loss = 0.45421057\n",
      "Iteration 303, loss = 0.45888611\n",
      "Iteration 304, loss = 0.46343913\n",
      "Iteration 305, loss = 0.45332018\n",
      "Iteration 306, loss = 0.45512507\n",
      "Iteration 307, loss = 0.45504919\n",
      "Iteration 308, loss = 0.46320804\n",
      "Iteration 309, loss = 0.45385179\n",
      "Iteration 310, loss = 0.44743470\n",
      "Iteration 311, loss = 0.44992457\n",
      "Iteration 312, loss = 0.45943743\n",
      "Iteration 313, loss = 0.46158393\n",
      "Iteration 314, loss = 0.44856858\n",
      "Iteration 315, loss = 0.44825643\n",
      "Iteration 316, loss = 0.44648868\n",
      "Iteration 317, loss = 0.45922232\n",
      "Iteration 318, loss = 0.45639559\n",
      "Iteration 319, loss = 0.45983777\n",
      "Iteration 320, loss = 0.44685035\n",
      "Iteration 321, loss = 0.44888193\n",
      "Iteration 322, loss = 0.45161788\n",
      "Iteration 323, loss = 0.45277395\n",
      "Iteration 324, loss = 0.44350805\n",
      "Iteration 325, loss = 0.43685984\n",
      "Iteration 326, loss = 0.44339789\n",
      "Iteration 327, loss = 0.45163116\n",
      "Iteration 328, loss = 0.44410505\n",
      "Iteration 329, loss = 0.44069596\n",
      "Iteration 330, loss = 0.44726447\n",
      "Iteration 331, loss = 0.44434403\n",
      "Iteration 332, loss = 0.44229903\n",
      "Iteration 333, loss = 0.44205122\n",
      "Iteration 334, loss = 0.43631776\n",
      "Iteration 335, loss = 0.43258376\n",
      "Iteration 336, loss = 0.43597529\n",
      "Iteration 337, loss = 0.43321117\n",
      "Iteration 338, loss = 0.44556845\n",
      "Iteration 339, loss = 0.43778828\n",
      "Iteration 340, loss = 0.43442689\n",
      "Iteration 341, loss = 0.43190378\n",
      "Iteration 342, loss = 0.43010990\n",
      "Iteration 343, loss = 0.43343218\n",
      "Iteration 344, loss = 0.42930453\n",
      "Iteration 345, loss = 0.42983711\n",
      "Iteration 346, loss = 0.43208202\n",
      "Iteration 347, loss = 0.43260464\n",
      "Iteration 348, loss = 0.43398453\n",
      "Iteration 349, loss = 0.42769789\n",
      "Iteration 350, loss = 0.42888692\n",
      "Iteration 351, loss = 0.42658176\n",
      "Iteration 352, loss = 0.44691547\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iteration 353, loss = 0.43218736\n",
      "Iteration 354, loss = 0.43287461\n",
      "Iteration 355, loss = 0.43326757\n",
      "Iteration 356, loss = 0.44156901\n",
      "Iteration 357, loss = 0.42765252\n",
      "Iteration 358, loss = 0.42492126\n",
      "Iteration 359, loss = 0.43054619\n",
      "Iteration 360, loss = 0.44733000\n",
      "Iteration 361, loss = 0.42348808\n",
      "Iteration 362, loss = 0.42733076\n",
      "Iteration 363, loss = 0.42523836\n",
      "Iteration 364, loss = 0.42697046\n",
      "Iteration 365, loss = 0.43429663\n",
      "Iteration 366, loss = 0.42027371\n",
      "Iteration 367, loss = 0.41561131\n",
      "Iteration 368, loss = 0.42707154\n",
      "Iteration 369, loss = 0.42270609\n",
      "Iteration 370, loss = 0.42316350\n",
      "Iteration 371, loss = 0.42115830\n",
      "Iteration 372, loss = 0.41716178\n",
      "Iteration 373, loss = 0.42225252\n",
      "Iteration 374, loss = 0.42230461\n",
      "Iteration 375, loss = 0.42138081\n",
      "Iteration 376, loss = 0.41874016\n",
      "Iteration 377, loss = 0.42329604\n",
      "Iteration 378, loss = 0.41493547\n",
      "Iteration 379, loss = 0.42422590\n",
      "Iteration 380, loss = 0.41924144\n",
      "Iteration 381, loss = 0.42320871\n",
      "Iteration 382, loss = 0.42626279\n",
      "Iteration 383, loss = 0.41460277\n",
      "Iteration 384, loss = 0.41623456\n",
      "Iteration 385, loss = 0.42190554\n",
      "Iteration 386, loss = 0.42861474\n",
      "Iteration 387, loss = 0.41051649\n",
      "Iteration 388, loss = 0.41263095\n",
      "Iteration 389, loss = 0.41753807\n",
      "Iteration 390, loss = 0.41249000\n",
      "Iteration 391, loss = 0.40953777\n",
      "Iteration 392, loss = 0.40984730\n",
      "Iteration 393, loss = 0.41303173\n",
      "Iteration 394, loss = 0.41001678\n",
      "Iteration 395, loss = 0.40785828\n",
      "Iteration 396, loss = 0.40938470\n",
      "Iteration 397, loss = 0.40521932\n",
      "Iteration 398, loss = 0.41019337\n",
      "Iteration 399, loss = 0.40781483\n",
      "Iteration 400, loss = 0.40358948\n",
      "Iteration 401, loss = 0.40108871\n",
      "Iteration 402, loss = 0.40701839\n",
      "Iteration 403, loss = 0.40665806\n",
      "Iteration 404, loss = 0.40237582\n",
      "Iteration 405, loss = 0.39874062\n",
      "Iteration 406, loss = 0.41176602\n",
      "Iteration 407, loss = 0.39899875\n",
      "Iteration 408, loss = 0.40265113\n",
      "Iteration 409, loss = 0.41069717\n",
      "Iteration 410, loss = 0.41825379\n",
      "Iteration 411, loss = 0.40150562\n",
      "Iteration 412, loss = 0.40243665\n",
      "Iteration 413, loss = 0.39986675\n",
      "Iteration 414, loss = 0.41241803\n",
      "Iteration 415, loss = 0.40363769\n",
      "Iteration 416, loss = 0.39644536\n",
      "Iteration 417, loss = 0.40837763\n",
      "Iteration 418, loss = 0.40394132\n",
      "Iteration 419, loss = 0.41332886\n",
      "Iteration 420, loss = 0.40652813\n",
      "Iteration 421, loss = 0.40155313\n",
      "Iteration 422, loss = 0.39840486\n",
      "Iteration 423, loss = 0.39875556\n",
      "Iteration 424, loss = 0.39851458\n",
      "Iteration 425, loss = 0.39475148\n",
      "Iteration 426, loss = 0.39834215\n",
      "Iteration 427, loss = 0.39216711\n",
      "Iteration 428, loss = 0.39489472\n",
      "Iteration 429, loss = 0.39976073\n",
      "Iteration 430, loss = 0.39815265\n",
      "Iteration 431, loss = 0.39416455\n",
      "Iteration 432, loss = 0.39556761\n",
      "Iteration 433, loss = 0.40222814\n",
      "Iteration 434, loss = 0.38990365\n",
      "Iteration 435, loss = 0.39955377\n",
      "Iteration 436, loss = 0.39631700\n",
      "Iteration 437, loss = 0.39165493\n",
      "Iteration 438, loss = 0.39392110\n",
      "Iteration 439, loss = 0.39105808\n",
      "Iteration 440, loss = 0.39068423\n",
      "Iteration 441, loss = 0.39522435\n",
      "Iteration 442, loss = 0.39516438\n",
      "Iteration 443, loss = 0.39795253\n",
      "Iteration 444, loss = 0.38896132\n",
      "Iteration 445, loss = 0.39262839\n",
      "Iteration 446, loss = 0.39099709\n",
      "Iteration 447, loss = 0.39759700\n",
      "Iteration 448, loss = 0.39271124\n",
      "Iteration 449, loss = 0.39455410\n",
      "Iteration 450, loss = 0.39815047\n",
      "Iteration 451, loss = 0.39177933\n",
      "Iteration 452, loss = 0.39037336\n",
      "Iteration 453, loss = 0.39483877\n",
      "Iteration 454, loss = 0.40001711\n",
      "Iteration 455, loss = 0.39484864\n",
      "Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
      "1.9553646304318295\n",
      "Iteration 1, loss = 16.61540308\n",
      "Iteration 2, loss = 3.39122273\n",
      "Iteration 3, loss = 2.33486758\n",
      "Iteration 4, loss = 1.95807140\n",
      "Iteration 5, loss = 1.78059775\n",
      "Iteration 6, loss = 1.68542106\n",
      "Iteration 7, loss = 1.62834883\n",
      "Iteration 8, loss = 1.59354194\n",
      "Iteration 9, loss = 1.56071629\n",
      "Iteration 10, loss = 1.53246253\n",
      "Iteration 11, loss = 1.51002828\n",
      "Iteration 12, loss = 1.49645773\n",
      "Iteration 13, loss = 1.47075677\n",
      "Iteration 14, loss = 1.45171689\n",
      "Iteration 15, loss = 1.43810957\n",
      "Iteration 16, loss = 1.41632466\n",
      "Iteration 17, loss = 1.40031168\n",
      "Iteration 18, loss = 1.38080187\n",
      "Iteration 19, loss = 1.35648873\n",
      "Iteration 20, loss = 1.34247203\n",
      "Iteration 21, loss = 1.31985569\n",
      "Iteration 22, loss = 1.29893372\n",
      "Iteration 23, loss = 1.28278463\n",
      "Iteration 24, loss = 1.26720317\n",
      "Iteration 25, loss = 1.24012794\n",
      "Iteration 26, loss = 1.22869698\n",
      "Iteration 27, loss = 1.20199522\n",
      "Iteration 28, loss = 1.18540175\n",
      "Iteration 29, loss = 1.16219302\n",
      "Iteration 30, loss = 1.14242559\n",
      "Iteration 31, loss = 1.12720402\n",
      "Iteration 32, loss = 1.10471420\n",
      "Iteration 33, loss = 1.09360928\n",
      "Iteration 34, loss = 1.07944619\n",
      "Iteration 35, loss = 1.05644284\n",
      "Iteration 36, loss = 1.04555861\n",
      "Iteration 37, loss = 1.03044205\n",
      "Iteration 38, loss = 1.02215852\n",
      "Iteration 39, loss = 1.00381268\n",
      "Iteration 40, loss = 0.99375197\n",
      "Iteration 41, loss = 0.97550707\n",
      "Iteration 42, loss = 0.96509720\n",
      "Iteration 43, loss = 0.95751023\n",
      "Iteration 44, loss = 0.94916848\n",
      "Iteration 45, loss = 0.93883565\n",
      "Iteration 46, loss = 0.93459823\n",
      "Iteration 47, loss = 0.92551023\n",
      "Iteration 48, loss = 0.91016553\n",
      "Iteration 49, loss = 0.90767672\n",
      "Iteration 50, loss = 0.90209079\n",
      "Iteration 51, loss = 0.89811944\n",
      "Iteration 52, loss = 0.88860145\n",
      "Iteration 53, loss = 0.88219259\n",
      "Iteration 54, loss = 0.88104357\n",
      "Iteration 55, loss = 0.87031510\n",
      "Iteration 56, loss = 0.86779194\n",
      "Iteration 57, loss = 0.86036770\n",
      "Iteration 58, loss = 0.86313354\n",
      "Iteration 59, loss = 0.85429319\n",
      "Iteration 60, loss = 0.85055314\n",
      "Iteration 61, loss = 0.84228159\n",
      "Iteration 62, loss = 0.83631008\n",
      "Iteration 63, loss = 0.83321813\n",
      "Iteration 64, loss = 0.82807924\n",
      "Iteration 65, loss = 0.82055509\n",
      "Iteration 66, loss = 0.81963115\n",
      "Iteration 67, loss = 0.82241743\n",
      "Iteration 68, loss = 0.81824096\n",
      "Iteration 69, loss = 0.80738847\n",
      "Iteration 70, loss = 0.80461628\n",
      "Iteration 71, loss = 0.79816916\n",
      "Iteration 72, loss = 0.78902851\n",
      "Iteration 73, loss = 0.78967582\n",
      "Iteration 74, loss = 0.78383581\n",
      "Iteration 75, loss = 0.78635403\n",
      "Iteration 76, loss = 0.77717232\n",
      "Iteration 77, loss = 0.77234149\n",
      "Iteration 78, loss = 0.77214690\n",
      "Iteration 79, loss = 0.76386786\n",
      "Iteration 80, loss = 0.76003356\n",
      "Iteration 81, loss = 0.75814007\n",
      "Iteration 82, loss = 0.75258336\n",
      "Iteration 83, loss = 0.75769865\n",
      "Iteration 84, loss = 0.74696823\n",
      "Iteration 85, loss = 0.74602142\n",
      "Iteration 86, loss = 0.74077713\n",
      "Iteration 87, loss = 0.73467973\n",
      "Iteration 88, loss = 0.74083123\n",
      "Iteration 89, loss = 0.73280256\n",
      "Iteration 90, loss = 0.73166793\n",
      "Iteration 91, loss = 0.72953765\n",
      "Iteration 92, loss = 0.72381666\n",
      "Iteration 93, loss = 0.71724137\n",
      "Iteration 94, loss = 0.71544739\n",
      "Iteration 95, loss = 0.71193832\n",
      "Iteration 96, loss = 0.70464920\n",
      "Iteration 97, loss = 0.70848909\n",
      "Iteration 98, loss = 0.70400720\n",
      "Iteration 99, loss = 0.70785196\n",
      "Iteration 100, loss = 0.69865786\n",
      "Iteration 101, loss = 0.69728535\n",
      "Iteration 102, loss = 0.69737554\n",
      "Iteration 103, loss = 0.69089036\n",
      "Iteration 104, loss = 0.69328453\n",
      "Iteration 105, loss = 0.69000232\n",
      "Iteration 106, loss = 0.68640781\n",
      "Iteration 107, loss = 0.68934976\n",
      "Iteration 108, loss = 0.67951954\n",
      "Iteration 109, loss = 0.68153276\n",
      "Iteration 110, loss = 0.67823478\n",
      "Iteration 111, loss = 0.67283034\n",
      "Iteration 112, loss = 0.67212760\n",
      "Iteration 113, loss = 0.67090159\n",
      "Iteration 114, loss = 0.66589458\n",
      "Iteration 115, loss = 0.67120954\n",
      "Iteration 116, loss = 0.66602357\n",
      "Iteration 117, loss = 0.66488798\n",
      "Iteration 118, loss = 0.65812119\n",
      "Iteration 119, loss = 0.65764442\n",
      "Iteration 120, loss = 0.65300099\n",
      "Iteration 121, loss = 0.64988210\n",
      "Iteration 122, loss = 0.65298701\n",
      "Iteration 123, loss = 0.65520340\n",
      "Iteration 124, loss = 0.64282963\n",
      "Iteration 125, loss = 0.64682954\n",
      "Iteration 126, loss = 0.64033506\n",
      "Iteration 127, loss = 0.64372410\n",
      "Iteration 128, loss = 0.63751126\n",
      "Iteration 129, loss = 0.63772771\n",
      "Iteration 130, loss = 0.63464640\n",
      "Iteration 131, loss = 0.63520577\n",
      "Iteration 132, loss = 0.62835007\n",
      "Iteration 133, loss = 0.62958743\n",
      "Iteration 134, loss = 0.63083971\n",
      "Iteration 135, loss = 0.62722052\n",
      "Iteration 136, loss = 0.62854279\n",
      "Iteration 137, loss = 0.62297924\n",
      "Iteration 138, loss = 0.62433664\n",
      "Iteration 139, loss = 0.63391864\n",
      "Iteration 140, loss = 0.62076220\n",
      "Iteration 141, loss = 0.62138409\n",
      "Iteration 142, loss = 0.61703717\n",
      "Iteration 143, loss = 0.61390921\n",
      "Iteration 144, loss = 0.61425616\n",
      "Iteration 145, loss = 0.60714788\n",
      "Iteration 146, loss = 0.61337697\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iteration 147, loss = 0.62266708\n",
      "Iteration 148, loss = 0.60438768\n",
      "Iteration 149, loss = 0.60514357\n",
      "Iteration 150, loss = 0.60681010\n",
      "Iteration 151, loss = 0.61080731\n",
      "Iteration 152, loss = 0.60730877\n",
      "Iteration 153, loss = 0.60258874\n",
      "Iteration 154, loss = 0.59739765\n",
      "Iteration 155, loss = 0.59403651\n",
      "Iteration 156, loss = 0.59337057\n",
      "Iteration 157, loss = 0.58965457\n",
      "Iteration 158, loss = 0.58976467\n",
      "Iteration 159, loss = 0.59517091\n",
      "Iteration 160, loss = 0.58905450\n",
      "Iteration 161, loss = 0.58175427\n",
      "Iteration 162, loss = 0.58695330\n",
      "Iteration 163, loss = 0.58032162\n",
      "Iteration 164, loss = 0.58176534\n",
      "Iteration 165, loss = 0.58288554\n",
      "Iteration 166, loss = 0.57720877\n",
      "Iteration 167, loss = 0.57470132\n",
      "Iteration 168, loss = 0.56869162\n",
      "Iteration 169, loss = 0.57594392\n",
      "Iteration 170, loss = 0.57016082\n",
      "Iteration 171, loss = 0.57776027\n",
      "Iteration 172, loss = 0.57117219\n",
      "Iteration 173, loss = 0.56662991\n",
      "Iteration 174, loss = 0.56857212\n",
      "Iteration 175, loss = 0.56933232\n",
      "Iteration 176, loss = 0.56358143\n",
      "Iteration 177, loss = 0.56323252\n",
      "Iteration 178, loss = 0.56443175\n",
      "Iteration 179, loss = 0.55912086\n",
      "Iteration 180, loss = 0.55648180\n",
      "Iteration 181, loss = 0.55832936\n",
      "Iteration 182, loss = 0.55527319\n",
      "Iteration 183, loss = 0.55143123\n",
      "Iteration 184, loss = 0.55368491\n",
      "Iteration 185, loss = 0.55032696\n",
      "Iteration 186, loss = 0.55369903\n",
      "Iteration 187, loss = 0.54988964\n",
      "Iteration 188, loss = 0.54413504\n",
      "Iteration 189, loss = 0.54714132\n",
      "Iteration 190, loss = 0.54800582\n",
      "Iteration 191, loss = 0.54754462\n",
      "Iteration 192, loss = 0.54273996\n",
      "Iteration 193, loss = 0.54463636\n",
      "Iteration 194, loss = 0.53774926\n",
      "Iteration 195, loss = 0.53712650\n",
      "Iteration 196, loss = 0.53642422\n",
      "Iteration 197, loss = 0.53595612\n",
      "Iteration 198, loss = 0.54270085\n",
      "Iteration 199, loss = 0.54180217\n",
      "Iteration 200, loss = 0.53085902\n",
      "Iteration 201, loss = 0.53182710\n",
      "Iteration 202, loss = 0.53292714\n",
      "Iteration 203, loss = 0.53182485\n",
      "Iteration 204, loss = 0.52617509\n",
      "Iteration 205, loss = 0.53302915\n",
      "Iteration 206, loss = 0.53530009\n",
      "Iteration 207, loss = 0.52718749\n",
      "Iteration 208, loss = 0.52735487\n",
      "Iteration 209, loss = 0.52494920\n",
      "Iteration 210, loss = 0.52087708\n",
      "Iteration 211, loss = 0.51769102\n",
      "Iteration 212, loss = 0.52652123\n",
      "Iteration 213, loss = 0.51683058\n",
      "Iteration 214, loss = 0.52098955\n",
      "Iteration 215, loss = 0.51416305\n",
      "Iteration 216, loss = 0.51644710\n",
      "Iteration 217, loss = 0.51058776\n",
      "Iteration 218, loss = 0.51512768\n",
      "Iteration 219, loss = 0.51046627\n",
      "Iteration 220, loss = 0.50866947\n",
      "Iteration 221, loss = 0.51127269\n",
      "Iteration 222, loss = 0.51175754\n",
      "Iteration 223, loss = 0.50796997\n",
      "Iteration 224, loss = 0.51441326\n",
      "Iteration 225, loss = 0.51857720\n",
      "Iteration 226, loss = 0.51105270\n",
      "Iteration 227, loss = 0.51181578\n",
      "Iteration 228, loss = 0.50300181\n",
      "Iteration 229, loss = 0.50108827\n",
      "Iteration 230, loss = 0.50856498\n",
      "Iteration 231, loss = 0.50132396\n",
      "Iteration 232, loss = 0.50018976\n",
      "Iteration 233, loss = 0.50198501\n",
      "Iteration 234, loss = 0.49894319\n",
      "Iteration 235, loss = 0.49351114\n",
      "Iteration 236, loss = 0.50084653\n",
      "Iteration 237, loss = 0.49256871\n",
      "Iteration 238, loss = 0.49615366\n",
      "Iteration 239, loss = 0.49100225\n",
      "Iteration 240, loss = 0.49414937\n",
      "Iteration 241, loss = 0.49157460\n",
      "Iteration 242, loss = 0.49516695\n",
      "Iteration 243, loss = 0.48812108\n",
      "Iteration 244, loss = 0.48818000\n",
      "Iteration 245, loss = 0.48648010\n",
      "Iteration 246, loss = 0.48610824\n",
      "Iteration 247, loss = 0.48238956\n",
      "Iteration 248, loss = 0.48576067\n",
      "Iteration 249, loss = 0.49387564\n",
      "Iteration 250, loss = 0.48644585\n",
      "Iteration 251, loss = 0.48038420\n",
      "Iteration 252, loss = 0.48354958\n",
      "Iteration 253, loss = 0.47987881\n",
      "Iteration 254, loss = 0.48175940\n",
      "Iteration 255, loss = 0.47985150\n",
      "Iteration 256, loss = 0.47508002\n",
      "Iteration 257, loss = 0.47578505\n",
      "Iteration 258, loss = 0.47622644\n",
      "Iteration 259, loss = 0.47984201\n",
      "Iteration 260, loss = 0.47717682\n",
      "Iteration 261, loss = 0.47444031\n",
      "Iteration 262, loss = 0.46944649\n",
      "Iteration 263, loss = 0.46959249\n",
      "Iteration 264, loss = 0.46951128\n",
      "Iteration 265, loss = 0.46949913\n",
      "Iteration 266, loss = 0.46637341\n",
      "Iteration 267, loss = 0.47090596\n",
      "Iteration 268, loss = 0.47124852\n",
      "Iteration 269, loss = 0.47504977\n",
      "Iteration 270, loss = 0.48204180\n",
      "Iteration 271, loss = 0.46443218\n",
      "Iteration 272, loss = 0.46906336\n",
      "Iteration 273, loss = 0.46401288\n",
      "Iteration 274, loss = 0.46630231\n",
      "Iteration 275, loss = 0.45742849\n",
      "Iteration 276, loss = 0.46578313\n",
      "Iteration 277, loss = 0.46279600\n",
      "Iteration 278, loss = 0.45817512\n",
      "Iteration 279, loss = 0.47315682\n",
      "Iteration 280, loss = 0.45953489\n",
      "Iteration 281, loss = 0.45843550\n",
      "Iteration 282, loss = 0.46312440\n",
      "Iteration 283, loss = 0.45950777\n",
      "Iteration 284, loss = 0.46257375\n",
      "Iteration 285, loss = 0.46647872\n",
      "Iteration 286, loss = 0.45359786\n",
      "Iteration 287, loss = 0.45419833\n",
      "Iteration 288, loss = 0.45174313\n",
      "Iteration 289, loss = 0.45078954\n",
      "Iteration 290, loss = 0.45414087\n",
      "Iteration 291, loss = 0.45850675\n",
      "Iteration 292, loss = 0.45387628\n",
      "Iteration 293, loss = 0.45311365\n",
      "Iteration 294, loss = 0.46183318\n",
      "Iteration 295, loss = 0.44800784\n",
      "Iteration 296, loss = 0.44691113\n",
      "Iteration 297, loss = 0.45432825\n",
      "Iteration 298, loss = 0.45411756\n",
      "Iteration 299, loss = 0.44453678\n",
      "Iteration 300, loss = 0.44362985\n",
      "Iteration 301, loss = 0.44535770\n",
      "Iteration 302, loss = 0.44940259\n",
      "Iteration 303, loss = 0.44970321\n",
      "Iteration 304, loss = 0.44980334\n",
      "Iteration 305, loss = 0.44221902\n",
      "Iteration 306, loss = 0.44007634\n",
      "Iteration 307, loss = 0.44130876\n",
      "Iteration 308, loss = 0.43269216\n",
      "Iteration 309, loss = 0.43807604\n",
      "Iteration 310, loss = 0.43490485\n",
      "Iteration 311, loss = 0.44436741\n",
      "Iteration 312, loss = 0.43397492\n",
      "Iteration 313, loss = 0.44479440\n",
      "Iteration 314, loss = 0.43824747\n",
      "Iteration 315, loss = 0.43578495\n",
      "Iteration 316, loss = 0.42979033\n",
      "Iteration 317, loss = 0.44747326\n",
      "Iteration 318, loss = 0.44539883\n",
      "Iteration 319, loss = 0.43515877\n",
      "Iteration 320, loss = 0.43375152\n",
      "Iteration 321, loss = 0.43814744\n",
      "Iteration 322, loss = 0.43808565\n",
      "Iteration 323, loss = 0.43389970\n",
      "Iteration 324, loss = 0.43230063\n",
      "Iteration 325, loss = 0.42671729\n",
      "Iteration 326, loss = 0.43249440\n",
      "Iteration 327, loss = 0.44424857\n",
      "Iteration 328, loss = 0.43295609\n",
      "Iteration 329, loss = 0.42748617\n",
      "Iteration 330, loss = 0.42762897\n",
      "Iteration 331, loss = 0.42769355\n",
      "Iteration 332, loss = 0.43246607\n",
      "Iteration 333, loss = 0.43587362\n",
      "Iteration 334, loss = 0.42523515\n",
      "Iteration 335, loss = 0.42549703\n",
      "Iteration 336, loss = 0.42819694\n",
      "Iteration 337, loss = 0.42078763\n",
      "Iteration 338, loss = 0.42551748\n",
      "Iteration 339, loss = 0.42089104\n",
      "Iteration 340, loss = 0.41913161\n",
      "Iteration 341, loss = 0.41643138\n",
      "Iteration 342, loss = 0.41937447\n",
      "Iteration 343, loss = 0.42173123\n",
      "Iteration 344, loss = 0.41856232\n",
      "Iteration 345, loss = 0.42077167\n",
      "Iteration 346, loss = 0.42104524\n",
      "Iteration 347, loss = 0.42127619\n",
      "Iteration 348, loss = 0.42365978\n",
      "Iteration 349, loss = 0.41668340\n",
      "Iteration 350, loss = 0.40930781\n",
      "Iteration 351, loss = 0.41046458\n",
      "Iteration 352, loss = 0.41847550\n",
      "Iteration 353, loss = 0.42471695\n",
      "Iteration 354, loss = 0.41413109\n",
      "Iteration 355, loss = 0.41673088\n",
      "Iteration 356, loss = 0.42265113\n",
      "Iteration 357, loss = 0.41372433\n",
      "Iteration 358, loss = 0.41181465\n",
      "Iteration 359, loss = 0.41800022\n",
      "Iteration 360, loss = 0.42026881\n",
      "Iteration 361, loss = 0.41252571\n",
      "Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
      "2.1714505546328877\n",
      "[11.34638076  8.53319488 13.99944047 ...  6.72509982 13.64127074\n",
      " 13.68172486]\n",
      "[[0, 84658], [1, 5080], [2, 1201931], [3, 1416358], [4, 1502802], [5, 770], [6, 39], [7, 1623057], [8, 228055], [9, 10203229], [10, 32], [11, 2806869], [12, 7901943], [13, 24672850], [14, 957645], [15, 936056], [16, 55119], [17, 2555336], [18, 725569], [19, 13491], [20, 71], [21, 5244449], [22, 185711826], [23, 93], [24, 224326], [25, 4617052], [26, 41576], [27, 85], [28, 354], [29, 15029], [30, 2182250], [31, 49826], [32, 42092289], [33, 4638816], [34, 3], [35, 30], [36, 23504865], [37, 1482134], [38, 6008536], [39, 4606172], [40, 3099339], [41, 903147], [42, 2516953], [43, 1298], [44, 266736], [45, 37], [46, 84], [47, 25], [48, 2011], [49, 31212822], [50, 787632], [51, 29890370], [52, 2725329], [53, 194549], [54, 23397947], [55, 305636], [56, 2328665], [57, 1537497], [58, 407871], [59, 667174], [60, 208160], [61, 729041], [62, 8041185], [63, 8430], [64, 637], [65, 51], [66, 173144], [67, 166218], [68, 125181], [69, 1122058], [70, 5708250], [71, 159070], [72, 48], [73, 63644], [74, 36429586], [75, 256], [76, 1015061], [77, 3856469], [78, 142084954], [79, 7591657], [80, 344], [81, 186], [82, 32995629], [83, 379800], [84, 41360], [85, 31554], [86, 821133], [87, 18450934], [88, 555], [89, 4798209], [90, 3557017], [91, 758950], [92, 740977], [93, 136323], [94, 174863], [95, 4007038], [96, 2428593], [97, 583], [98, 15953185], [99, 6], [100, 72525], [101, 918042], [102, 944], [103, 3988], [104, 1431904], [105, 65], [106, 33342951], [107, 941327], [108, 37936501], [109, 1616250], [110, 23], [111, 8], [112, 608510], [113, 326865], [114, 773796], [115, 7501557], [116, 74], [117, 14554], [118, 26475], [119, 2004381], [120, 118], [121, 5749497], [122, 1808263], [123, 71], [124, 177031], [125, 970374], [126, 2300117], [127, 1447298], [128, 183121], [129, 46776], [130, 227789], [131, 1727810], [132, 54359], [133, 243789], [134, 294], [135, 5240921], [136, 47], [137, 143045], [138, 1768250], [139, 214805], [140, 1756339], [141, 3238944], [142, 8], [143, 7296], [144, 
1360], [145, 843978], [146, 2199706], [147, 1067795], [148, 489], [149, 3479253], [150, 2948397], [151, 1294491], [152, 232995953], [153, 65186], [154, 1260425], [155, 56430464], [156, 245054481], [157, 1373186], [158, 333468], [159, 69], [160, 3618286], [161, 5716297], [162, 10539], [163, 2447096], [164, 4597807], [165, 5183205], [166, 13275944], [167, 74466045], [168, 6505279], [169, 117], [170, 130802], [171, 11667594], [172, 264670], [173, 12214546], [174, 1207563], [175, 692], [176, 81], [177, 13231284], [178, 25644], [179, 1441422], [180, 21501953], [181, 3034324], [182, 2061], [183, 55674], [184, 96184361], [185, 7416602], [186, 394010], [187, 1309030], [188, 45152], [189, 26338], [190, 617346328], [191, 22664895], [192, 8948395], [193, 5], [194, 25], [195, 166547], [196, 33253233], [197, 134751], [198, 334], [199, 239742], [200, 1], [201, 6320364], [202, 4146946], [203, 74445], [204, 77228373], [205, 27474761], [206, 20], [207, 1065], [208, 15292807], [209, 16664441], [210, 8115], [211, 5394794], [212, 115], [213, 363822], [214, 14086], [215, 16], [216, 477], [217, 4110888], [218, 4271442], [219, 826812], [220, 156], [221, 66], [222, 4693], [223, 1749181], [224, 101871], [225, 27524609], [226, 107039], [227, 1339], [228, 77], [229, 17085247], [230, 2832582], [231, 11457042], [232, 445], [233, 237080], [234, 3243564], [235, 187360], [236, 889813], [237, 7668182], [238, 25498], [239, 536502], [240, 7819], [241, 83866402], [242, 29], [243, 21367], [244, 2279087], [245, 1260251], [246, 1088010], [247, 8522988], [248, 2808909], [249, 63294], [250, 29865], [251, 158290], [252, 62439], [253, 1280663], [254, 1873568], [255, 13963150], [256, 96], [257, 96006783], [258, 209104857], [259, 284084], [260, 169828], [261, 1338949], [262, 341236], [263, 7457], [264, 2078376], [265, 35796], [266, 30868544], [267, 18541197], [268, 266671], [269, 4], [270, 3531487], [271, 2626049], [272, 1905796], [273, 20540291], [274, 290019], [275, 155324645], [276, 3637125], [277, 
2395442], [278, 81], [279, 27167571], [280, 127], [281, 540479], [282, 11], [283, 636], [284, 2810650], [285, 428161], [286, 620656], [287, 140292], [288, 135431], [289, 237596], [290, 340093], [291, 49], [292, 12967783], [293, 2995940], [294, 17424], [295, 376], [296, 1284620], [297, 1955447], [298, 2635249], [299, 172], [300, 22053802], [301, 5], [302, 142086], [303, 621152], [304, 140942], [305, 13576], [306, 132], [307, 6403160], [308, 3052854], [309, 3655560], [310, 4083047], [311, 336], [312, 4], [313, 727800], [314, 469304], [315, 41705277], [316, 226], [317, 205608], [318, 6], [319, 23428], [320, 3], [321, 154714], [322, 108644714], [323, 572219], [324, 362], [325, 425227], [326, 736135], [327, 115], [328, 27], [329, 16], [330, 502636], [331, 1669263], [332, 266844], [333, 82941], [334, 886108], [335, 46], [336, 87305], [337, 142396], [338, 6269898], [339, 20], [340, 12356362], [341, 288811], [342, 266317597], [343, 358], [344, 6], [345, 95], [346, 1427073], [347, 14838], [348, 534622], [349, 2841], [350, 2278013], [351, 1966235], [352, 352522], [353, 1135991], [354, 1200434], [355, 1146647], [356, 4257], [357, 195188466], [358, 3587241], [359, 4168538], [360, 77088], [361, 2411581], [362, 955802], [363, 137050468], [364, 2595992], [365, 1984647], [366, 652813], [367, 107147122], [368, 9244564], [369, 68785744], [370, 187041], [371, 17], [372, 12151], [373, 309056], [374, 4238], [375, 3752082], [376, 17216], [377, 2807650], [378, 29096182], [379, 6], [380, 729], [381, 209], [382, 13531200], [383, 5262769], [384, 6397619], [385, 8], [386, 23068674], [387, 1880465], [388, 2844954], [389, 14013], [390, 4850191], [391, 27887289], [392, 14861887], [393, 1002], [394, 28434655], [395, 1], [396, 1538], [397, 42373], [398, 1512612], [399, 61370775], [400, 6243916], [401, 1762], [402, 640433], [403, 15275], [404, 243], [405, 2852502], [406, 9022523], [407, 310044], [408, 9092150], [409, 289365], [410, 22069397], [411, 2443], [412, 910720], [413, 198], [414, 1425573], 
[415, 357698], [416, 200770], [417, 8471604], [418, 23478614], [419, 1094832], [420, 4622390], [421, 504720], [422, 30611], [423, 6], [424, 9498], [425, 3571807], [426, 57153], [427, 9], [428, 35871], [429, 1603], [430, 156], [431, 5540755], [432, 8], [433, 24927], [434, 53144], [435, 1390938], [436, 9090282], [437, 208876], [438, 3563906], [439, 6000484], [440, 105], [441, 58115], [442, 42], [443, 215], [444, 6171977], [445, 5454981], [446, 109615], [447, 66801], [448, 3047590], [449, 535258], [450, 22], [451, 4821], [452, 5157889], [453, 19339483], [454, 4176], [455, 8545825], [456, 8744522], [457, 135986], [458, 597806], [459, 45808], [460, 517510], [461, 2546504], [462, 393085], [463, 3025965], [464, 348234], [465, 15089], [466, 2792623], [467, 37], [468, 14], [469, 9], [470, 5222166], [471, 936740], [472, 78784], [473, 2], [474, 5538604], [475, 7], [476, 4436034], [477, 1479225], [478, 3069931], [479, 5258247], [480, 312], [481, 4388417], [482, 237711], [483, 1662883], [484, 1603665], [485, 2125909], [486, 1028], [487, 471901], [488, 16], [489, 825126], [490, 8972], [491, 30], [492, 435772], [493, 41], [494, 129], [495, 350072], [496, 415623], [497, 21927], [498, 42], [499, 16177], [500, 14979537], [501, 30022], [502, 377], [503, 61], [504, 56597024], [505, 235371], [506, 6683], [507, 95463], [508, 202], [509, 1861350], [510, 42], [511, 40], [512, 12025147], [513, 3359634], [514, 5497167], [515, 1530813], [516, 339419], [517, 2791276], [518, 7524070], [519, 132641], [520, 219], [521, 1157], [522, 5563], [523, 9], [524, 391153], [525, 4831], [526, 100], [527, 67247], [528, 1306403], [529, 56290], [530, 304742], [531, 29557059], [532, 20425126], [533, 607937], [534, 82475], [535, 330302786], [536, 6], [537, 379272], [538, 39914207], [539, 3930585], [540, 61], [541, 239810029], [542, 79941786], [543, 1591745], [544, 12], [545, 474], [546, 296822], [547, 992137], [548, 219042], [549, 771], [550, 196], [551, 36296], [552, 5854058], [553, 250300], [554, 1], [555, 
10343328], [556, 121116], [557, 7419592], [558, 267115], [559, 378087], [560, 393], [561, 96122], [562, 133905], [563, 5011720], [564, 5747092], [565, 58354], [566, 10036360], [567, 35764834], [568, 4], [569, 5685635], [570, 18765187], [571, 2987], [572, 201627], [573, 1017426], [574, 120319], [575, 351470], [576, 8713223], [577, 2493834], [578, 533918], [579, 9026], [580, 3177], [581, 1335252], [582, 2609280], [583, 1271176], [584, 201976], [585, 4752371], [586, 47676251], [587, 1604906], [588, 45926380], [589, 2106], [590, 3258245], [591, 321350], [592, 32845], [593, 5461513], [594, 21], [595, 50783], [596, 2142058], [597, 1878432], [598, 1092587], [599, 2272], [600, 135], [601, 2040580], [602, 1203477], [603, 3643947], [604, 203288], [605, 1752929], [606, 36139229], [607, 16048073], [608, 10989171], [609, 165599], [610, 6170], [611, 32315912], [612, 99343], [613, 366], [614, 1660], [615, 425552], [616, 15570], [617, 23], [618, 40101], [619, 7397017], [620, 9203], [621, 16184], [622, 55925], [623, 34], [624, 40356], [625, 955808], [626, 12641119], [627, 926858], [628, 8953534], [629, 435303], [630, 1476346], [631, 23], [632, 16242265], [633, 19], [634, 1881], [635, 75642643], [636, 32677906], [637, 96855], [638, 53478], [639, 1139658], [640, 86506], [641, 359391], [642, 91426], [643, 559118], [644, 15871509], [645, 599263], [646, 940021], [647, 59947], [648, 3533279], [649, 3], [650, 62330], [651, 15], [652, 2385662], [653, 56266], [654, 120826], [655, 4729], [656, 344], [657, 3105458], [658, 73], [659, 32], [660, 7678797], [661, 1499110], [662, 956], [663, 6203965], [664, 5057546], [665, 732533], [666, 702154], [667, 79644], [668, 291], [669, 32221], [670, 338020], [671, 99677], [672, 995], [673, 6306024], [674, 75], [675, 128727880], [676, 2120345], [677, 6811], [678, 88971779], [679, 1539315], [680, 27], [681, 6], [682, 13915258], [683, 153], [684, 50984477], [685, 11787585], [686, 16368], [687, 68], [688, 48731], [689, 471936], [690, 645340], [691, 710371], 
[692, 36519756], [693, 1167867], [694, 69], [695, 60714], [696, 19484177], [697, 20733015], [698, 3089117], [699, 98162589], [700, 7054422], [701, 100740], [702, 437503], [703, 42], [704, 1184980], [705, 55], [706, 101382], [707, 4351072], [708, 606915], [709, 339693], [710, 2362127], [711, 650129], [712, 316912], [713, 715058], [714, 305], [715, 3832], [716, 101892000], [717, 32903], [718, 589], [719, 20444829], [720, 1428609], [721, 1129720], [722, 26097149], [723, 103], [724, 349825395], [725, 24062272], [726, 11536681], [727, 9856807], [728, 115451175], [729, 384084], [730, 11295740], [731, 1959639], [732, 211], [733, 3600133], [734, 26119], [735, 241283], [736, 503935], [737, 6986378], [738, 27572], [739, 6941], [740, 359826], [741, 2451498], [742, 7039241], [743, 30630145], [744, 60417690], [745, 4460552], [746, 21335], [747, 929], [748, 8577323], [749, 65911], [750, 711803], [751, 102], [752, 1723335], [753, 1821], [754, 30115482], [755, 320], [756, 2201], [757, 478650], [758, 6149461], [759, 293], [760, 645009], [761, 2246298], [762, 1836865], [763, 1492258], [764, 27], [765, 1699649], [766, 21545], [767, 360], [768, 57448], [769, 6698], [770, 830412], [771, 14959773], [772, 31445832], [773, 37], [774, 30661039], [775, 8], [776, 41205], [777, 4940445], [778, 14939354], [779, 1691], [780, 2919039], [781, 271], [782, 1075762], [783, 27908194], [784, 41746132], [785, 1761287], [786, 90578], [787, 8640750], [788, 2194324], [789, 2312593], [790, 326454652], [791, 46], [792, 4357386], [793, 13372], [794, 129304], [795, 1057400], [796, 82826], [797, 148], [798, 561275], [799, 405738], [800, 112768], [801, 368844], [802, 21999], [803, 18179], [804, 338818], [805, 1916702], [806, 1034817], [807, 8921101], [808, 429], [809, 2743672], [810, 728939], [811, 406188], [812, 52718148], [813, 261024523], [814, 50014], [815, 4694773], [816, 57], [817, 80779], [818, 1397456], [819, 1946], [820, 947397], [821, 3756049], [822, 593165], [823, 19027002], [824, 4], [825, 3856393], 
[826, 181579534], [827, 6373405], [828, 5954492], [829, 59129], [830, 5108], [831, 1345932], [832, 3161090], [833, 1448329], [834, 48683015], [835, 107], [836, 5547410], [837, 92921096], [838, 39930670], [839, 224181], [840, 32752], [841, 2892178], [842, 81340], [843, 280], [844, 173427], [845, 162589], [846, 93], [847, 4840160], [848, 8718711], [849, 6872175], [850, 343300], [851, 14], [852, 6324167], [853, 598673], [854, 3031760], [855, 467110], [856, 115749771], [857, 179338], [858, 4171514], [859, 177574], [860, 32526680], [861, 1], [862, 7], [863, 128808860], [864, 75054774], [865, 49734], [866, 149388], [867, 1173706], [868, 6782298], [869, 48408026], [870, 29426220], [871, 94873], [872, 5505037], [873, 7819314], [874, 1593634], [875, 9679994], [876, 244572], [877, 80093172], [878, 289121], [879, 62585], [880, 10975323], [881, 118431], [882, 15147], [883, 50737], [884, 3013671], [885, 1300350], [886, 252912], [887, 1191225], [888, 1495196], [889, 534979], [890, 22], [891, 203289], [892, 1934735], [893, 417345], [894, 130], [895, 58005444], [896, 5632], [897, 5533300], [898, 1756116], [899, 134], [900, 3620237], [901, 16], [902, 53899], [903, 499], [904, 154151], [905, 20315], [906, 60540], [907, 726325], [908, 2169618], [909, 137863], [910, 146], [911, 30350661], [912, 3103], [913, 148338343], [914, 698279], [915, 509], [916, 1257452], [917, 96], [918, 3519100], [919, 1055503], [920, 2171], [921, 636921], [922, 5627515], [923, 6125047], [924, 563118], [925, 2842188], [926, 228033], [927, 6245], [928, 905], [929, 2934056], [930, 2112279], [931, 1465277], [932, 31], [933, 2484687], [934, 2550], [935, 343631658], [936, 25589], [937, 41], [938, 19765561], [939, 10910321], [940, 4773883], [941, 1597], [942, 701], [943, 159100], [944, 218], [945, 239], [946, 143], [947, 29985178], [948, 403126], [949, 1712826], [950, 699251], [951, 9], [952, 498631], [953, 212334], [954, 95788], [955, 113], [956, 18539], [957, 1789], [958, 685566], [959, 222], [960, 3776736], [961, 
72716], [962, 693512], [963, 1201], [964, 3071932], [965, 129], [966, 570804], [967, 2970946], [968, 16165080], [969, 185], [970, 287658], [971, 33276], [972, 1777883], [973, 6155249], [974, 23972065], [975, 1483], [976, 1124], [977, 1027077], [978, 8442], [979, 38110], [980, 1824473], [981, 5016044], [982, 986416], [983, 2243920], [984, 3218538], [985, 40268], [986, 446266], [987, 223], [988, 576609], [989, 2460071], [990, 2118], [991, 1322238], [992, 3721979], [993, 1185834], [994, 12732833], [995, 598851], [996, 192771], [997, 39], [998, 2580326], [999, 17017], [1000, 971379], [1001, 1721171], [1002, 5979903], [1003, 1744], [1004, 17558753], [1005, 16901085], [1006, 10610116], [1007, 3114], [1008, 78605], [1009, 1000065], [1010, 810], [1011, 203], [1012, 82269], [1013, 1583845], [1014, 3800], [1015, 819], [1016, 31545], [1017, 13262], [1018, 18154], [1019, 350607], [1020, 8029284], [1021, 106], [1022, 9888590], [1023, 1350368], [1024, 3607], [1025, 1772757], [1026, 18], [1027, 2749974], [1028, 584], [1029, 1089885], [1030, 32792], [1031, 1010445], [1032, 41273], [1033, 8726593], [1034, 42688], [1035, 1402950], [1036, 1365268], [1037, 320610], [1038, 8028853], [1039, 6], [1040, 11046023], [1041, 84829], [1042, 3112690], [1043, 1033425], [1044, 1050192], [1045, 33803245], [1046, 5782707], [1047, 996890], [1048, 1064], [1049, 11305159], [1050, 78378], [1051, 94], [1052, 224063], [1053, 17830705], [1054, 13607585], [1055, 1065780], [1056, 1286561], [1057, 5025149], [1058, 1598], [1059, 519964], [1060, 2696449], [1061, 55698], [1062, 14867688], [1063, 5035991], [1064, 3670], [1065, 628763], [1066, 4789797], [1067, 3551396], [1068, 21421], [1069, 30179], [1070, 114926], [1071, 3893], [1072, 2947433], [1073, 60032], [1074, 162], [1075, 22051861], [1076, 440187], [1077, 76792], [1078, 7461463], [1079, 229630], [1080, 72044020], [1081, 46179823], [1082, 6281063], [1083, 303927], [1084, 1879704], [1085, 358], [1086, 2823073], [1087, 15], [1088, 12864], [1089, 3], [1090, 
18575280], [1091, 4690181], [1092, 3], [1093, 2317768], [1094, 32958434], [1095, 12481], [1096, 229542], [1097, 40029], [1098, 29846], [1099, 16088], [1100, 452025], [1101, 1273361], [1102, 2459589], [1103, 6464001], [1104, 53353018], [1105, 94261], [1106, 13369], [1107, 11447755], [1108, 1079], [1109, 4923], [1110, 99], [1111, 894283], [1112, 1691636], [1113, 2023045], [1114, 693798], [1115, 1622132], [1116, 888612], [1117, 6136360], [1118, 205813], [1119, 568738], [1120, 9330], [1121, 900184], [1122, 194796], [1123, 181], [1124, 135015], [1125, 26861653], [1126, 2359432], [1127, 411980], [1128, 153], [1129, 3306001], [1130, 913685], [1131, 423078], [1132, 414178], [1133, 15558142], [1134, 11377809], [1135, 16845498], [1136, 184], [1137, 10], [1138, 301346], [1139, 142797776], [1140, 1293], [1141, 429318], [1142, 1519572], [1143, 500], [1144, 2394857], [1145, 2386714], [1146, 1030547], [1147, 866861], [1148, 3662], [1149, 3174834], [1150, 87734201], [1151, 287301], [1152, 1780298], [1153, 130034732], [1154, 108575], [1155, 1088584], [1156, 132846730], [1157, 254572], [1158, 138698], [1159, 9107126], [1160, 107937695], [1161, 17020057], [1162, 5079556], [1163, 1030362], [1164, 2565], [1165, 205], [1166, 3061], [1167, 8082732], [1168, 29551126], [1169, 265], [1170, 2], [1171, 297], [1172, 22], [1173, 302144], [1174, 159787], [1175, 100088728], [1176, 2049488], [1177, 3618], [1178, 1509940], [1179, 395], [1180, 16560082], [1181, 2633016], [1182, 5611816], [1183, 1366949], [1184, 52], [1185, 3416338], [1186, 4694070], [1187, 16689643], [1188, 84671], [1189, 409803], [1190, 581945], [1191, 74806445], [1192, 29112000], [1193, 30725], [1194, 1976], [1195, 2411683], [1196, 7754834], [1197, 39270383], [1198, 1973867], [1199, 37562], [1200, 6], [1201, 2], [1202, 470], [1203, 4471095], [1204, 13492], [1205, 2803659], [1206, 31948703], [1207, 1077], [1208, 37], [1209, 5621345], [1210, 6058847], [1211, 981621], [1212, 6552], [1213, 2099443], [1214, 5321952], [1215, 2918093], 
[1216, 4507332], [1217, 823777], [1218, 87470], [1219, 2700915], [1220, 1117], [1221, 24], [1222, 195], [1223, 2108895], [1224, 15696758], [1225, 142589], [1226, 314], [1227, 395324], [1228, 42011], [1229, 7645954], [1230, 9337], [1231, 356108], [1232, 1329678], [1233, 2702281], [1234, 35057028], [1235, 844], [1236, 4103531], [1237, 14951600], [1238, 22], [1239, 4795368], [1240, 2585023], [1241, 380233], [1242, 2368], [1243, 1753], [1244, 228568], [1245, 134], [1246, 38666129], [1247, 246], [1248, 85043], [1249, 6066418], [1250, 259], [1251, 1105375], [1252, 662420], [1253, 9137587], [1254, 16550], [1255, 2276349], [1256, 782], [1257, 6397403], [1258, 4], [1259, 31], [1260, 4179502], [1261, 2671669], [1262, 10135], [1263, 37881], [1264, 143469], [1265, 5252], [1266, 2], [1267, 172], [1268, 1211725], [1269, 28], [1270, 224315], [1271, 5371515], [1272, 1709566], [1273, 1600380], [1274, 3255786], [1275, 5776], [1276, 37877], [1277, 710376], [1278, 3508696], [1279, 13675462], [1280, 4288167], [1281, 204805967], [1282, 1037139], [1283, 4150897], [1284, 4598926], [1285, 1553105], [1286, 17590721], [1287, 107219], [1288, 3729565], [1289, 14693326], [1290, 7677236], [1291, 15320001], [1292, 23401], [1293, 585991], [1294, 19912], [1295, 3313019], [1296, 12194], [1297, 3285335], [1298, 833929], [1299, 3911962], [1300, 27627], [1301, 2609896], [1302, 57196188], [1303, 6109921], [1304, 1736049], [1305, 1805860], [1306, 478423], [1307, 20398004], [1308, 5], [1309, 5647808], [1310, 2425335], [1311, 98197], [1312, 1124262], [1313, 209272], [1314, 233087], [1315, 326821552], [1316, 323], [1317, 333], [1318, 362], [1319, 641685], [1320, 6452], [1321, 23818], [1322, 1759098], [1323, 1706835], [1324, 43439], [1325, 29726478], [1326, 715886], [1327, 1706720], [1328, 454368], [1329, 108916], [1330, 52081], [1331, 82162224], [1332, 1688859], [1333, 94352674], [1334, 261572924], [1335, 3371], [1336, 6039769], [1337, 10842367], [1338, 11113], [1339, 49562], [1340, 28790825], [1341, 12], 
[1342, 16697256], [1343, 1932081], [1344, 204], [1345, 51], [1346, 47638], [1347, 140872433], [1348, 958], [1349, 98836], [1350, 155], [1351, 7371639], [1352, 3028619], [1353, 750101], [1354, 6852285], [1355, 316221], [1356, 3389300], [1357, 6844], [1358, 804386], [1359, 9114117], [1360, 6], [1361, 874], [1362, 469], [1363, 171717], [1364, 2447], [1365, 886730], [1366, 19250], [1367, 1055110], [1368, 1413770], [1369, 2641869], [1370, 1860250], [1371, 204990], [1372, 482], [1373, 17505559], [1374, 29816], [1375, 12660], [1376, 57629], [1377, 22743], [1378, 18628675], [1379, 10799050], [1380, 267643], [1381, 2019], [1382, 11790184], [1383, 1463219], [1384, 850848], [1385, 196361], [1386, 6337], [1387, 16405276], [1388, 603222], [1389, 26021179], [1390, 25245254], [1391, 230107], [1392, 1152], [1393, 27187286], [1394, 77931], [1395, 101], [1396, 190707], [1397, 394098], [1398, 30038], [1399, 125], [1400, 113934], [1401, 7713], [1402, 76528], [1403, 78608], [1404, 3116952], [1405, 41643238], [1406, 2578392], [1407, 203856], [1408, 5978061], [1409, 44313], [1410, 4113350], [1411, 3150907], [1412, 106378], [1413, 5999279], [1414, 126022], [1415, 24514198], [1416, 20184378], [1417, 183415], [1418, 470374], [1419, 26962874], [1420, 54569], [1421, 178], [1422, 4781226], [1423, 6503423], [1424, 2259223], [1425, 2480626], [1426, 150591], [1427, 44603], [1428, 4441], [1429, 25556], [1430, 5836306], [1431, 257877], [1432, 2031150], [1433, 25], [1434, 189993], [1435, 102696], [1436, 849917], [1437, 1329], [1438, 422], [1439, 11541], [1440, 412661], [1441, 898339], [1442, 845129], [1443, 70528], [1444, 41627983], [1445, 37], [1446, 21429217], [1447, 71487], [1448, 52297540], [1449, 1612559], [1450, 170], [1451, 5], [1452, 8247], [1453, 1254162], [1454, 679839], [1455, 322541], [1456, 280293], [1457, 6413744], [1458, 3478], [1459, 5692135], [1460, 20725294], [1461, 11181632], [1462, 188162], [1463, 19112145], [1464, 2836767], [1465, 2937411], [1466, 6593218], [1467, 16626], [1468, 
461], [1469, 952029], [1470, 1274397], [1471, 709772], [1472, 1421619], [1473, 2998], [1474, 999], [1475, 28654912], [1476, 28618823], [1477, 71066], [1478, 1644685], [1479, 3720189], [1480, 809764], [1481, 836993], [1482, 436262], [1483, 1330868], [1484, 237430], [1485, 572033], [1486, 1158736], [1487, 9001170], [1488, 1949544], [1489, 42], [1490, 8800], [1491, 9], [1492, 909020], [1493, 1008], [1494, 148771], [1495, 228348], [1496, 288740], [1497, 2681263], [1498, 19], [1499, 675], [1500, 79181], [1501, 30], [1502, 33332], [1503, 7727], [1504, 53], [1505, 18278618], [1506, 655], [1507, 54973], [1508, 30328], [1509, 8784129], [1510, 19538354], [1511, 9745], [1512, 1359], [1513, 78], [1514, 2426887], [1515, 84463], [1516, 35], [1517, 39620], [1518, 312703], [1519, 6499560], [1520, 52841], [1521, 189157], [1522, 8475166], [1523, 305], [1524, 2160144], [1525, 1806987], [1526, 2056452], [1527, 379585], [1528, 34518203], [1529, 942287], [1530, 4116896], [1531, 569], [1532, 6], [1533, 10305316], [1534, 8], [1535, 61], [1536, 194594778], [1537, 357756], [1538, 14339], [1539, 75905], [1540, 116292], [1541, 2882131], [1542, 470860], [1543, 400127], [1544, 8353], [1545, 151597], [1546, 4672], [1547, 29519], [1548, 3626], [1549, 518594], [1550, 11921491], [1551, 31717004], [1552, 20299094], [1553, 4], [1554, 391], [1555, 26089], [1556, 214], [1557, 17], [1558, 100505], [1559, 24975536], [1560, 391], [1561, 7064772], [1562, 6834478], [1563, 465853], [1564, 24376127], [1565, 678176], [1566, 1067788], [1567, 5441432], [1568, 207], [1569, 4359275], [1570, 1942342], [1571, 1548229], [1572, 5], [1573, 2127324], [1574, 543351], [1575, 28316], [1576, 125035], [1577, 37765421], [1578, 1124465], [1579, 1239], [1580, 4319204], [1581, 3495627], [1582, 1991363], [1583, 7366282], [1584, 22515], [1585, 60772], [1586, 18334], [1587, 936254], [1588, 5054], [1589, 27591155], [1590, 33528645], [1591, 323], [1592, 7], [1593, 400], [1594, 240447], [1595, 168], [1596, 6890469], [1597, 27764325], 
[1598, 68225], [1599, 16432], [1600, 898], [1601, 1484532], [1602, 5], [1603, 693007], [1604, 12619647], [1605, 6622], [1606, 2705561], [1607, 1220648], [1608, 989962], [1609, 2417224], [1610, 199], [1611, 64610], [1612, 1531068], [1613, 24232400], [1614, 820164], [1615, 114703], [1616, 15235209], [1617, 4510308], [1618, 196], [1619, 6353540], [1620, 18329], [1621, 5762475], [1622, 5783318], [1623, 677556], [1624, 113005], [1625, 4631067], [1626, 55742], [1627, 925], [1628, 56], [1629, 5192483], [1630, 143908], [1631, 214201746], [1632, 606], [1633, 42264], [1634, 27232420], [1635, 11], [1636, 6], [1637, 1028478], [1638, 1478856], [1639, 46983], [1640, 34], [1641, 1943326], [1642, 6112450], [1643, 338748], [1644, 16935], [1645, 48], [1646, 1475], [1647, 97], [1648, 333], [1649, 17], [1650, 264289], [1651, 1126405], [1652, 185111856], [1653, 136650], [1654, 12048], [1655, 5768714], [1656, 11987961], [1657, 17416870], [1658, 285359], [1659, 2], [1660, 65540], [1661, 1996109], [1662, 849602], [1663, 203547], [1664, 1562516], [1665, 3378203], [1666, 2665428], [1667, 24784], [1668, 166], [1669, 543], [1670, 448806], [1671, 4738521], [1672, 28943], [1673, 3719734], [1674, 3237523], [1675, 4092991], [1676, 35183072], [1677, 64642600], [1678, 2], [1679, 371], [1680, 90740], [1681, 5574147], [1682, 47236], [1683, 14333055], [1684, 31387792], [1685, 5199070], [1686, 1016939], [1687, 616777], [1688, 1783070], [1689, 68928962], [1690, 550776], [1691, 6192680], [1692, 901579], [1693, 240222], [1694, 86], [1695, 3920], [1696, 3950], [1697, 441105846], [1698, 33203733], [1699, 7435523], [1700, 201185375], [1701, 15832132], [1702, 2516163], [1703, 120], [1704, 17681951], [1705, 873], [1706, 12212218], [1707, 1507946], [1708, 4], [1709, 456148562], [1710, 518545], [1711, 46853], [1712, 2353678], [1713, 52487], [1714, 818], [1715, 984652], [1716, 281592191], [1717, 24072986], [1718, 42], [1719, 25780], [1720, 461], [1721, 182], [1722, 221520], [1723, 328360], [1724, 6189827], [1725, 
21130867], [1726, 184663], [1727, 15561272], [1728, 9420274], [1729, 245099], [1730, 14045938], [1731, 4980686], [1732, 186], [1733, 14757869], [1734, 1595487], [1735, 53540], [1736, 99624740], [1737, 195333], [1738, 30891012], [1739, 5], [1740, 362713], [1741, 233974], [1742, 100], [1743, 4231939], [1744, 327411], [1745, 79922], [1746, 2208301], [1747, 1141116], [1748, 91145], [1749, 101870], [1750, 334339], [1751, 20448826], [1752, 57533], [1753, 834879], [1754, 73302], [1755, 5799523], [1756, 3], [1757, 293], [1758, 7030012], [1759, 7], [1760, 3938122], [1761, 1606976], [1762, 151], [1763, 788535], [1764, 36461814], [1765, 967190], [1766, 52070910], [1767, 44], [1768, 1331458], [1769, 15240], [1770, 302], [1771, 63], [1772, 29322], [1773, 2573525], [1774, 15528633], [1775, 68251], [1776, 5832701], [1777, 36], [1778, 170], [1779, 27604358], [1780, 80299], [1781, 5867966], [1782, 2364], [1783, 4142617], [1784, 2552072], [1785, 20235], [1786, 12383603], [1787, 5345317], [1788, 2333721], [1789, 180572], [1790, 7170], [1791, 8130012], [1792, 609863], [1793, 11110196], [1794, 10622], [1795, 5145479], [1796, 123], [1797, 19682431], [1798, 85911306], [1799, 12585082], [1800, 88670234], [1801, 1505647], [1802, 39], [1803, 1208412], [1804, 1360899], [1805, 12], [1806, 30196], [1807, 74221610], [1808, 208245], [1809, 5306675], [1810, 1588], [1811, 5], [1812, 1668599], [1813, 878612], [1814, 14331297], [1815, 16020960], [1816, 28780], [1817, 598815], [1818, 19590404], [1819, 132993], [1820, 31541], [1821, 2340038], [1822, 1029770], [1823, 10861], [1824, 82678280], [1825, 2524938], [1826, 454750], [1827, 4110739], [1828, 78704], [1829, 636512], [1830, 930122], [1831, 366472], [1832, 21505374], [1833, 4326458], [1834, 575483], [1835, 26499913], [1836, 2336264], [1837, 977074], [1838, 146508], [1839, 2284255], [1840, 7979], [1841, 110495], [1842, 22356311], [1843, 748827], [1844, 803252], [1845, 152], [1846, 604102], [1847, 35], [1848, 1452080], [1849, 77], [1850, 36148], 
[1851, 210219], [1852, 4626862], [1853, 1881], [1854, 50645452], [1855, 43436870], [1856, 171263], [1857, 358], [1858, 13184], [1859, 184829865], [1860, 80032], [1861, 46], [1862, 66225663], [1863, 73806], [1864, 372], [1865, 1705455], [1866, 3308749], [1867, 7557168], [1868, 4210124], [1869, 357093], [1870, 15499], [1871, 2515220], [1872, 170489916], [1873, 46939151], [1874, 176], [1875, 902], [1876, 10283], [1877, 17], [1878, 28859], [1879, 4106846], [1880, 3149214], [1881, 132194], [1882, 1000840], [1883, 6040], [1884, 8207], [1885, 261551], [1886, 20479], [1887, 259], [1888, 2142616], [1889, 14393132], [1890, 5708595], [1891, 219632], [1892, 58411], [1893, 345], [1894, 13506946], [1895, 341765], [1896, 38680546], [1897, 2954189], [1898, 171615], [1899, 8338670], [1900, 4481945], [1901, 5569399], [1902, 140674], [1903, 19], [1904, 6], [1905, 3117598], [1906, 2525730], [1907, 524], [1908, 50], [1909, 55679], [1910, 26], [1911, 38774414], [1912, 1594791], [1913, 92670], [1914, 1521406], [1915, 43], [1916, 2654070], [1917, 4280912], [1918, 235], [1919, 444656], [1920, 85343], [1921, 571003], [1922, 77], [1923, 67130], [1924, 1542795], [1925, 208926], [1926, 12235484], [1927, 10597404], [1928, 34859], [1929, 4833773], [1930, 1337789], [1931, 1338], [1932, 293997], [1933, 1165917], [1934, 703402], [1935, 4435774], [1936, 1098950], [1937, 77], [1938, 129], [1939, 11187], [1940, 148815], [1941, 14886], [1942, 14769], [1943, 2030], [1944, 2527841], [1945, 7187412], [1946, 1000160], [1947, 1396449], [1948, 691390], [1949, 3786048], [1950, 2941], [1951, 152849], [1952, 178082], [1953, 494122582], [1954, 11140], [1955, 163693], [1956, 10512], [1957, 447258], [1958, 1971720], [1959, 1016], [1960, 14148403], [1961, 969196], [1962, 3169864], [1963, 3907448], [1964, 2153434], [1965, 51926902], [1966, 21706], [1967, 41927], [1968, 2633209], [1969, 115], [1970, 9332432], [1971, 2844511], [1972, 608446], [1973, 53948788], [1974, 246], [1975, 85], [1976, 8], [1977, 1978], [1978, 
5262402], [1979, 29764166], [1980, 357], [1981, 162770], [1982, 555246], [1983, 345], [1984, 11492881], [1985, 3199], [1986, 3745249], [1987, 454030], [1988, 151], [1989, 37987971], [1990, 49971827], [1991, 27381], [1992, 14924], [1993, 347811], [1994, 3990], [1995, 4592749], [1996, 815], [1997, 833], [1998, 840095], [1999, 874777]]\n"
     ]
    }
   ],
   "source": [
    "def qerror(X_test, y_test) :\n",
    "    msle = 0\n",
    "    N = len(y_test)\n",
    "    for i in range(N) :\n",
    "        msle += (X_test[i]-y_test[i])**2\n",
    "    return msle / N\n",
    "# Hyper-parameters kept for reference (were used by an earlier GradientBoostingRegressor experiment).\n",
    "params = {'n_estimators': 500, 'max_depth': 4, 'learning_rate': 0.01}\n",
    "\n",
    "# 10-fold cross-validation: train one MLP per fold and keep the model with\n",
    "# the lowest validation error (qerror = MSE in log space, defined above).\n",
    "kf = KFold(n_splits = 10)\n",
    "best_clf = None\n",
    "# Start at +inf so the first fold always becomes the current best.\n",
    "# (Previously initialized to 100: if every fold had scored above 100,\n",
    "# best_clf would have stayed None and best_clf.predict() would crash.)\n",
    "best_score = float('inf')\n",
    "for train_index, test_index in kf.split(X_train):\n",
    "    # Fresh regressor each fold so folds do not share trained weights.\n",
    "    clf = MLPRegressor(verbose = True, solver = 'sgd', activation = 'relu', max_iter = 5000, hidden_layer_sizes = (200, 200),random_state = 1, batch_size = 128)\n",
    "    x_train, x_test = np.array(X_train)[train_index], np.array(X_train)[test_index]\n",
    "    y_train, y_test = np.array(Y_train)[train_index], np.array(Y_train)[test_index]\n",
    "    clf.fit(x_train, y_train)\n",
    "\n",
    "    # Score this fold on its held-out split.\n",
    "    y_predict = clf.predict(x_test)\n",
    "    test_score = qerror(y_predict, y_test)\n",
    "\n",
    "    print(test_score)\n",
    "\n",
    "    # Keep whichever fold's model validates best.\n",
    "    if test_score < best_score:\n",
    "        best_score = test_score\n",
    "        best_clf = clf\n",
    "\n",
    "# Predict log-cardinalities for the unlabeled test set with the best fold model.\n",
    "Y_test = best_clf.predict(X_test)\n",
    "\n",
    "print(Y_test)\n",
    "\n",
    "# Labels were trained in log space; exponentiate back to raw cardinalities.\n",
    "Writeline = []\n",
    "for i, pre_card in enumerate(Y_test):\n",
    "    Writeline.append([i, int(math.exp(pre_card))])\n",
    "\n",
    "print(Writeline)\n",
    "\n",
    "# Write predictions in the submission format: one (query id, cardinality) row each.\n",
    "with open(\"xxx.csv\", \"w\", newline = '') as csvfile:\n",
    "    csv_writer = csv.writer(csvfile)\n",
    "    csv_writer.writerow(['Query ID', 'Predicted Cardinality'])\n",
    "    for i in Writeline:\n",
    "        csv_writer.writerow(i)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "1 0.006778348381995141 84658 91923\n",
      "2 1.8045655818955424 5080 1325\n",
      "3 0.28848377170597866 1201931 2056564\n",
      "4 0.1997290014742001 1416358 2214440\n",
      "5 0.1270225397974213 1502802 2146275\n",
      "6 0.37815148328818393 770 1425\n",
      "7 5.301898110478397 39 3\n",
      "8 0.1735961415654672 1623057 1070004\n",
      "9 2.056301564564239 228055 54358\n",
      "10 0.8070626164193869 10203229 4155104\n",
      "11 0.5440522009076423 32 68\n",
      "12 1.6606790641666835 2806869 773674\n",
      "13 1.227891496583688 7901943 23931879\n",
      "14 0.4288982293169786 24672850 12817356\n",
      "15 0.08458461122715887 957645 715973\n",
      "16 0.024750990035045703 936056 1095537\n",
      "17 0.4408068160698314 55119 107065\n",
      "18 0.0007042378559109717 2555336 2624056\n",
      "19 21.66229804697002 725569 6907\n",
      "20 0.8509469867038472 13491 33938\n",
      "21 1.8153557605123811 71 276\n",
      "22 0.07243352995830678 5244449 4006970\n",
      "23 0.5349454032379369 185711826 89371025\n",
      "24 1.3853690049590655 93 304\n",
      "25 1.1799907893669368 224326 75703\n",
      "26 0.0696469504270527 4617052 3546103\n",
      "27 1.0610774356444892 41576 14841\n",
      "28 1.7393734474810478 85 22\n",
      "29 1.348823700437775 354 1133\n",
      "30 0.033562308219578514 15029 12513\n",
      "31 0.6756926974837197 2182250 959206\n",
      "32 0.6218485905634532 49826 22645\n",
      "33 0.05410073828384852 42092289 33357042\n",
      "34 0.07062019419265649 4638816 3556278\n",
      "35 3.924366819651561 3 28\n",
      "36 51.73446417737349 30 41218\n",
      "37 0.05939028310509507 23504865 18421274\n",
      "38 3.0095107788677518 1482134 8400386\n",
      "39 0.1042026270706889 6008536 4350874\n",
      "40 4.579366577773993 4606172 39147744\n",
      "41 0.018951511120752225 3099339 3556772\n",
      "42 0.02816935144997435 903147 1068192\n",
      "43 0.25152302495150075 2516953 1524289\n",
      "44 9.255075949306589 1298 61\n",
      "45 0.08183464468722305 266736 200375\n",
      "46 2.427814650747429 37 7\n",
      "47 6.233716157555766 84 6\n",
      "48 1.0091876850362373 25 70\n",
      "49 12.700907529209948 2011 56\n",
      "50 2.904819954310224 31212822 5677299\n",
      "51 0.04267441623330382 787632 968365\n",
      "52 0.013026685288890843 29890370 33504202\n",
      "53 5.161310369013557 2725329 26428454\n",
      "54 0.0063549028394045545 194549 210693\n",
      "55 0.053090265698629575 23397947 18582774\n",
      "56 0.4853077375982942 305636 613412\n",
      "57 0.48292276347294655 2328665 4665625\n",
      "58 0.19283565518984616 1537497 991065\n",
      "59 8.044011525184454 407871 23920\n",
      "60 0.2492301064541318 667174 404973\n",
      "61 0.051534000159365594 208160 165885\n",
      "62 1.0186551158844586 729041 265720\n",
      "63 1.0250500805441802 8041185 2921592\n",
      "64 0.16142831348860576 8430 12599\n",
      "65 1.7965233007473533 637 166\n",
      "66 0.38320954154420384 51 27\n",
      "67 0.04821060433998164 173144 139011\n",
      "68 0.6993161760002322 166218 72027\n",
      "69 1.7322886099257853 125181 466814\n",
      "70 0.00030213072861756814 1122058 1102723\n",
      "71 0.09491762053954887 5708250 4194715\n",
      "72 0.12693683531968336 159070 227154\n",
      "73 1.5694150552333266 48 13\n",
      "74 8.290886691438418 63644 1133097\n",
      "75 0.1776365128642678 36429586 23900796\n",
      "76 1.6346981563715968 256 922\n",
      "77 0.3800358474274125 1015061 1880278\n",
      "78 2.294565239145005 3856469 17540885\n",
      "79 1.798521984705938 142084954 37163804\n",
      "80 0.13767834724808004 7591657 11002255\n",
      "81 4.434708668001893 344 41\n",
      "82 2.127194982917595 186 803\n",
      "83 0.058295567921338276 32995629 25917809\n",
      "84 3.8458657598063093 379800 53439\n",
      "85 0.10620142247947914 41360 29857\n",
      "86 0.032245834164205886 31554 37761\n",
      "87 34.44655931572761 821133 2319\n",
      "88 0.8939178232609901 18450934 47493236\n",
      "89 0.21421636660325233 555 349\n",
      "90 0.02165280288683049 4798209 4141648\n",
      "91 0.07732174411861682 3557017 4697311\n",
      "92 0.026234565067979193 758950 645461\n",
      "93 0.9013112363736634 740977 286745\n",
      "94 0.0011842677659104149 136323 141096\n",
      "95 2.8221009260728014 174863 32592\n",
      "96 0.34497882204101055 4007038 7209513\n",
      "97 0.27494482712047175 2428593 4102777\n",
      "98 1.2847293500154566 583 187\n",
      "99 0.02722337256625828 15953185 18814984\n",
      "100 23.65002519754357 6 905\n",
      "101 0.021855189237532983 72525 62558\n",
      "102 1.8465396256140203 918042 235890\n",
      "103 4.510514159268244 944 112\n",
      "104 0.9367039760183432 3988 10499\n",
      "105 0.07938478697677474 1431904 1080314\n",
      "106 0.22665471374186497 65 40\n",
      "107 0.07977603391101959 33342951 44225118\n",
      "108 0.2568695523297033 941327 567061\n",
      "109 0.018336239765116055 37936501 33132092\n",
      "110 0.11665923212876338 1616250 1148612\n",
      "111 6.03555065745989 23 279\n",
      "112 0.06315894186215867 8 6\n",
      "113 0.10478136613106993 608510 440237\n",
      "114 0.9282537993611535 326865 856632\n",
      "115 0.04873748763526182 773796 964948\n",
      "116 0.005116179842674993 7501557 8057780\n",
      "117 13.135847198447989 74 1\n",
      "118 0.001772133010445876 14554 13954\n",
      "119 0.010840781028557393 26475 23857\n",
      "120 2.875070925617736 2004381 367780\n",
      "121 6.133242447256466 118 9\n",
      "122 0.09729957483582687 5749497 4208825\n",
      "123 0.10949419889541243 1808263 2517499\n",
      "124 0.37660683578776144 71 132\n",
      "125 11.614982021133226 177031 5347617\n",
      "126 0.165094508113435 970374 646364\n",
      "127 4.11150676158031 2300117 17472801\n",
      "128 0.00017798614810592288 1447298 1466736\n",
      "129 8.822648432367792 183121 9391\n",
      "130 2.205813763132667 46776 10592\n",
      "131 0.08441329222083882 227789 170354\n",
      "132 0.0006499420775875635 1727810 1684318\n",
      "133 6.167089746463637 54359 4536\n",
      "134 3.0650179031322375 243789 1403923\n",
      "135 1.260291751308508 294 95\n",
      "136 3.1409440306882943 5240921 890676\n",
      "137 1.7062933821313342 47 12\n",
      "138 1.4675372709257948 143045 480383\n",
      "139 1.2633389616074393 1768250 5441062\n",
      "140 0.0008932852373547964 214805 221322\n",
      "141 1.8318204149697173 1756339 453746\n",
      "142 0.13391997077836976 3238944 2246327\n",
      "143 1.2881794479471858 8 27\n",
      "144 0.028072809396754418 7296 8627\n",
      "145 0.026651853306979823 1360 1155\n",
      "146 3.4820225688303794 843978 5454222\n",
      "147 0.23465429055460735 2199706 1355150\n",
      "148 1.0736164495525988 1067795 3009431\n",
      "149 0.5720291425879821 489 229\n",
      "150 0.16354272861982277 3479253 5213346\n",
      "151 7.481636415717357 2948397 191283\n",
      "152 0.2474854438181965 1294491 2128882\n",
      "153 0.06205192740545756 232995953 298904262\n",
      "154 0.078186734844353 65186 86217\n",
      "155 0.156366555027365 1260425 848756\n",
      "156 8.049161531727961 56430464 3306547\n",
      "157 0.6441108877900316 245054481 109827984\n",
      "158 2.1699434649846525 1373186 314761\n",
      "159 0.09974354808208855 333468 243161\n",
      "160 1.2387712841446388 69 22\n",
      "161 0.06658103565464922 3618286 2795378\n",
      "162 0.6044959056228649 5716297 2626961\n",
      "163 6.045707019517767 10539 123220\n",
      "164 0.24181542207648546 2447096 1496538\n",
      "165 0.34303048181198903 4597807 2559703\n",
      "166 0.11283435320695939 5183205 7252396\n",
      "167 0.0021773774976902656 13275944 12670688\n",
      "168 0.6816075533474368 74466045 32614206\n",
      "169 1.584020766151651 6505279 22901284\n",
      "170 0.004303003469523585 117 125\n",
      "171 2.251243590415382 130802 29173\n",
      "172 0.31917320233358015 11667594 6631699\n",
      "173 0.2597360692030218 264670 440596\n",
      "174 2.546050345832738 12214546 2476862\n",
      "175 0.1780700702152304 1207563 1841514\n",
      "176 0.44565015720588724 692 1350\n",
      "177 2.3029171810420093 81 373\n",
      "178 0.05746572878120653 13231284 10411010\n",
      "179 2.7429621003583975 25644 134361\n",
      "180 0.008097149461897426 1441422 1577142\n",
      "181 0.7204668148905659 21501953 9201261\n",
      "182 0.029779135896614118 3034324 2553391\n",
      "183 18.182854415838232 2061 28\n",
      "184 1.1359498767566367 55674 161634\n",
      "185 4.120506097534618 96184361 12633648\n",
      "186 6.9023048909325135 7416602 102610978\n",
      "187 0.05353664286728653 394010 496586\n",
      "188 2.5270219501521248e-05 1309030 1315627\n",
      "189 0.03159201794812092 45152 53935\n",
      "190 1.9957878297292861 26338 6412\n",
      "191 2.1466922385973386 617346328 142632403\n",
      "192 0.04029318473167343 22664895 18542875\n",
      "193 0.4116554576732003 8948395 4710859\n",
      "194 1.206948960812582 5 1\n",
      "195 0.3025509727583094 25 14\n",
      "196 0.027694713899403707 166547 141014\n",
      "197 0.004562316059180108 33253233 35576913\n",
      "198 2.037851201608798 134751 32326\n",
      "199 7.670728994913339 334 20\n",
      "200 0.06046976627739554 239742 187477\n",
      "201 0.0 1 1\n",
      "202 0.05730073050881874 6320364 4974879\n",
      "203 0.0011608027918666533 4146946 4008037\n",
      "204 5.428622062360495 74445 765105\n",
      "205 0.10898236378106067 77228373 55514383\n",
      "206 2.3388387656103287e-05 27474761 27607955\n",
      "207 1.206948960812582 20 6\n",
      "208 1.2604773853878266 1065 3275\n",
      "209 0.013326559805673622 15292807 17164157\n",
      "210 0.042645594082353 16664441 13555184\n",
      "211 3.645027922330611 8115 54763\n",
      "212 0.025735686064593494 5394794 6333531\n",
      "213 0.8162085123351025 115 46\n",
      "214 0.675735482687744 363822 159913\n",
      "215 0.000933970865539401 14086 13662\n",
      "216 17.18875157658982 16 1073\n",
      "217 3.9202257241783593 477 65\n",
      "218 1.37013498341068 4110888 1275210\n",
      "219 1.26299471657018 4271442 13141590\n",
      "220 39.43801050932924 826812 1548\n",
      "221 0.04477978824481075 156 193\n",
      "222 10.829689276517945 66 1799\n",
      "223 0.00013083652143594212 4693 4747\n",
      "224 0.37539020763189224 1749181 3227923\n",
      "225 0.0023571807871284704 101871 106939\n",
      "226 0.5523240064039748 27524609 57873844\n",
      "227 0.9083362349409171 107039 41269\n",
      "228 0.5624317082740792 1339 632\n",
      "229 8.823362097265735 77 3\n",
      "230 0.005555840798995465 17085247 15858059\n",
      "231 0.20508393834595345 2832582 1800972\n",
      "232 3.536539639584392 11457042 1747236\n",
      "233 0.6810808807105168 445 1017\n",
      "234 0.945148272912527 237080 626775\n",
      "235 0.0022435090827711847 3243564 3093512\n",
      "236 8.157233306891033 187360 10771\n",
      "237 0.041781416353723254 889813 725315\n",
      "238 1.2717775941799527 7668182 23684263\n",
      "239 0.06971242962669699 25498 33203\n",
      "240 8.94567859594911 536502 10678683\n",
      "241 3.6015141074788293 7819 52166\n",
      "242 0.1627700647342843 83866402 125546073\n",
      "243 7.333535891689721 29 1\n",
      "244 0.11560180946584368 21367 30020\n",
      "245 0.22246751294746622 2279087 1422065\n",
      "246 1.885632449723493 1260251 319220\n",
      "247 1.119659135481535 1088010 3134565\n",
      "248 14.660949042731946 8522988 185222\n",
      "249 0.01991769970272405 2808909 2439190\n",
      "250 5.413299715336608 63294 6178\n",
      "251 0.01707430844488987 29865 34034\n",
      "252 0.3593228599660362 158290 86920\n",
      "253 44.18941871447671 62439 80\n",
      "254 0.021255449094798994 1280663 1481671\n",
      "255 0.805279007084883 1873568 4596147\n",
      "256 0.9476280255498893 13963150 5274900\n",
      "257 15.066537517505083 96 1\n",
      "258 4.438447778436102 96006783 11677412\n",
      "259 1.0024954284806933 209104857 76829516\n",
      "260 0.21436136806656383 284084 451359\n",
      "261 1.0846644104968919 169828 481190\n",
      "262 0.36824355133857756 1338949 729828\n",
      "263 0.21138276396033276 341236 540416\n",
      "264 1.4705961605123583 7457 2217\n",
      "265 0.11600916084056294 2078376 1478437\n",
      "266 6.024947457068413 35796 416730\n",
      "267 1.2252689588387717 30868544 93378056\n",
      "268 0.19127575059603677 18541197 11972882\n",
      "269 0.003599967987159517 266671 283161\n",
      "270 1.4976263070418956 4 16\n",
      "271 0.05089039413486748 3531487 2818289\n",
      "272 0.21042720952025862 2626049 1659899\n",
      "273 0.26732966117630547 1905796 1136394\n",
      "274 0.0074485785753021505 20540291 18841902\n",
      "275 4.503008521769418 290019 34740\n",
      "276 0.007063434965601104 155324645 142804015\n",
      "277 0.10328591946342419 3637125 2637449\n",
      "278 2.9949761192468034 2395442 424419\n",
      "279 4.604408886567434 81 700\n",
      "280 0.13085652497464112 27167571 18921201\n",
      "281 0.067577914511375 127 165\n",
      "282 0.193282137623678 540479 838903\n",
      "283 6.7418003850204515 11 160\n",
      "284 0.06566943439371752 636 492\n",
      "285 0.6231893667710616 2810650 1276334\n",
      "286 13.791178069672965 428161 17555966\n",
      "287 17.64009902936128 620656 9306\n",
      "288 0.9875712052250825 140292 378985\n",
      "289 0.00016993068057767678 135431 133677\n",
      "290 2.1585917246917465 237596 1032553\n",
      "291 0.05008587239025595 340093 271896\n",
      "292 3.3583548212738994 49 7\n",
      "293 1.6229208343491806 12967783 46358618\n",
      "294 0.8788988230679163 2995940 1173231\n",
      "295 6.053927483307727 17424 1487\n",
      "296 17.143621982783422 376 5\n",
      "297 0.167396064092421 1284620 853271\n",
      "298 0.0024466844059826214 1955447 1861076\n",
      "299 0.014657374871375826 2635249 2974409\n",
      "300 2.646904706146145 172 33\n",
      "301 0.03298018333821083 22053802 26445592\n",
      "302 8.246543867973982 5 105\n",
      "303 1.0128572646199714 142086 51936\n",
      "304 0.10547594238457493 621152 448902\n",
      "305 0.03394789858390024 140942 169457\n",
      "306 2.76658236309788 13576 2572\n",
      "307 2.6642485182154965 132 25\n",
      "308 2.837178516741016 6403160 34507765\n",
      "309 0.00010648505324426358 3052854 3084520\n",
      "310 0.9036382119281856 3655560 1412908\n",
      "311 0.26375152365929616 4083047 2443120\n",
      "312 0.46824526777371167 336 169\n",
      "313 0.03324115007177121 4 5\n",
      "314 0.007714898162638944 727800 666601\n",
      "315 0.014356980376536528 469304 529044\n",
      "316 0.05631694517829699 41705277 32894838\n",
      "317 1.2563830134719782 226 73\n",
      "318 0.0988101688894667 205608 281551\n",
      "319 0.7179136642167333 6 2\n",
      "320 0.733941012186682 23428 9946\n",
      "321 0.31316983010658395 3 6\n",
      "322 0.08443578829327175 154714 206884\n",
      "323 3.8192969101369516 108644714 15390968\n",
      "324 0.22789472764901303 572219 355007\n",
      "325 6.053645071264261 362 30\n",
      "326 0.4413986519100218 425227 826337\n",
      "327 2.7785722142177476 736135 139004\n",
      "328 0.5568995188158687 115 54\n",
      "329 2.372970924178664 27 5\n",
      "330 0.2815663408252977 16 9\n",
      "331 0.28453722672135884 502636 856871\n",
      "332 0.21579984713825853 1669263 1049002\n",
      "333 0.40657079709337013 266844 504867\n",
      "334 2.838700072860935 82941 447190\n",
      "335 0.9510424677777738 886108 334161\n",
      "336 2.7530798164109545 46 246\n",
      "337 0.0010152663526053092 87305 84567\n",
      "338 0.14298621987270688 142396 207836\n",
      "339 0.09825792995800806 6269898 4582748\n",
      "340 0.11321356601688148 20 14\n",
      "341 1.3282833481713714 12356362 3902663\n",
      "342 0.25874552007317353 288811 480316\n",
      "343 0.05705021267345747 266317597 209733506\n",
      "344 1.3066877155067425 358 1125\n",
      "345 7.537428346734661 6 108\n",
      "346 0.01210019694795367 95 85\n",
      "347 0.04344027564061842 1427073 1158588\n",
      "348 0.025311666739833844 14838 17397\n",
      "349 0.04931594753158252 534622 667562\n",
      "350 0.30430888340149337 2841 4933\n",
      "351 3.230860950335442 2278013 377510\n",
      "352 0.04548247287955177 1966235 2433637\n",
      "353 0.014574621651435615 352522 397756\n",
      "354 0.4205589176638921 1135991 593926\n",
      "355 0.18936069751863116 1200434 776877\n",
      "356 0.05887425753571427 1146647 899606\n",
      "357 3.7692587242059092 4257 610\n",
      "358 0.0625422255379239 195188466 152000095\n",
      "359 0.3435862827720493 3587241 1996151\n",
      "360 0.025146039856653004 4168538 3557259\n",
      "361 0.2628316929877352 77088 128718\n",
      "362 11.92202186148758 2411581 76340\n",
      "363 0.1340698418082271 955802 662748\n",
      "364 0.2312320997780323 137050468 84731145\n",
      "365 0.03845311901241709 2595992 2133735\n",
      "366 0.22099284790536508 1984647 1240286\n",
      "367 0.04018803461154602 652813 534227\n",
      "368 0.01468030751064636 107147122 94920580\n",
      "369 0.006427623927695126 9244564 10016244\n",
      "370 0.4402861370656578 68785744 35426350\n",
      "371 1.6567251394696085 187041 677541\n",
      "372 8.478268364411454 17 330\n",
      "373 0.04849970261815389 12151 9749\n",
      "374 0.1010271191285561 309056 224904\n",
      "375 10.928027091210932 4238 115593\n",
      "376 0.029853699314082206 3752082 4459752\n",
      "377 2.22875396930182 17216 3868\n",
      "378 0.2842365214662101 2807650 1647418\n",
      "379 0.0019580682347040334 29096182 30412601\n",
      "380 1.5694150552333266 6 1\n",
      "381 1.4169739572037638 729 221\n",
      "382 0.047268358913758676 209 260\n",
      "383 0.13444372521148215 13531200 19524378\n",
      "384 0.6110582354924944 5262769 11500144\n",
      "385 0.27317282479084476 6397619 3793414\n",
      "386 0.637614540919013 8 19\n",
      "387 0.6251477574771893 23068674 50863139\n",
      "388 0.5122349337108529 1880465 919259\n",
      "389 0.2027195753171725 2844954 1813580\n",
      "390 0.0030920883833379267 14013 13255\n",
      "391 3.3934047232464963 4850191 768663\n",
      "392 0.18115352137934485 27887289 18220542\n",
      "393 0.03085459939585619 14861887 12467749\n",
      "394 3.012095539952809 1002 5688\n",
      "395 0.0005043012760947968 28434655 29080426\n",
      "396 5.301898110478399 1 19\n",
      "397 2.8777197273996147 1538 8393\n",
      "398 0.3799525691149589 42373 22876\n",
      "399 0.0562752972978675 1512612 1193170\n",
      "400 0.34578329711199574 61370775 110494657\n",
      "401 3.5551069058689233 6243916 947534\n",
      "402 0.03901591062580712 1762 1446\n",
      "403 4.339666560019196 640433 79754\n",
      "404 0.20309268519707468 15275 9733\n",
      "405 4.126717280871235 243 31\n",
      "406 1.390674001235714 2852502 877154\n",
      "407 0.10435638182416233 9022523 12463027\n",
      "408 4.236151864556313 310044 39587\n",
      "409 0.11014424108947864 9092150 12670688\n",
      "410 6.795551983597206 289365 21345\n",
      "411 0.0031304127663768907 22069397 20868520\n",
      "412 1.8122349388683139 2443 635\n",
      "413 0.0343459290592002 910720 756656\n",
      "414 2.7414042976231126 198 37\n",
      "415 0.14206124773920226 1425573 977911\n",
      "416 0.00015123777558852976 357698 353326\n",
      "417 0.7882834387743314 200770 487854\n",
      "418 0.03250991465211652 8471604 7073920\n",
      "419 0.0006008666751068014 23478614 22910089\n",
      "420 0.18262162330504592 1094832 714093\n",
      "421 0.002133782919758337 4622390 4840920\n",
      "422 0.1473763401908429 504720 343816\n",
      "423 7.80261933249311 30611 1873\n",
      "424 0.7179136642167333 6 2\n",
      "425 18.945080447017894 9498 737879\n",
      "426 0.011731574915991277 3571807 3205151\n",
      "427 0.7174690467776943 57153 24500\n",
      "428 0.12721701563369794 9 6\n",
      "429 2.761399565412579e-05 35871 36060\n",
      "430 2.308787758841537 1603 350\n",
      "431 8.983947347098818 156 3144\n",
      "432 0.7825128401909555 5540755 2287668\n",
      "433 0.34549316343675585 8 4\n",
      "434 13.75408474461004 24927 610\n",
      "435 2.6189627934323014 53144 10534\n",
      "436 1.461791303807952 1390938 4660042\n",
      "437 0.09305996360122304 9090282 12332811\n",
      "438 0.1073606730234242 208876 289861\n",
      "439 4.557130760985695 3563906 30132403\n",
      "440 0.4636406252464493 6000484 11855025\n",
      "441 0.0008242701797804201 105 102\n",
      "442 2.096826700068489 58115 13658\n",
      "443 9.412948812782002 42 1\n",
      "444 8.864810331902362 215 10\n",
      "445 0.14744050578187554 6171977 4204012\n",
      "446 2.242704439614744 5454981 1220136\n",
      "447 0.5445214975224182 109615 229269\n",
      "448 1.460421416458227 66801 223678\n",
      "449 0.09809710674377414 3047590 4168498\n",
      "450 0.03205396666856311 535258 447516\n",
      "451 0.18270838581137297 22 14\n",
      "452 7.020575935601383 4821 68225\n",
      "453 0.020674047630509076 5157889 5955482\n",
      "454 0.08122917953419774 19339483 14543460\n",
      "455 1.2233721748847213 4176 1381\n",
      "456 0.8197606766953697 8545825 3455739\n",
      "457 0.026108910597616 8744522 7439807\n",
      "458 3.169734771170209 135986 22923\n",
      "459 8.968382567243881e-08 597806 597627\n",
      "460 0.047635179602386085 45808 56981\n",
      "461 0.03234965556780819 517510 432321\n",
      "462 0.008918578756952341 2546504 2317023\n",
      "463 1.9415262380605405 393085 97576\n",
      "464 0.24273099362962905 3025965 1848829\n",
      "465 0.04022012513451548 348234 425568\n",
      "466 0.09085855870375179 15089 11162\n",
      "467 0.0013684016398507676 2792623 2897862\n",
      "468 0.019903170886979807 37 32\n",
      "469 0.16440195389316548 14 9\n",
      "470 2.5902903939802346 9 1\n",
      "471 0.18622694220446698 5222166 3391838\n",
      "472 0.08364911594750152 936740 701474\n",
      "473 0.307031973658971 78784 137115\n",
      "474 0.7179136642167333 2 6\n",
      "475 0.10695930133911108 5538604 3993612\n",
      "476 0.08276097481015171 7 5\n",
      "477 0.0865560346833897 4436034 3305397\n",
      "478 3.4073085276037336 1479225 9369020\n",
      "479 5.385496293443893 3069931 301491\n",
      "480 0.6480052737783475 5258247 2350927\n",
      "481 8.071710612888468 312 5362\n",
      "482 0.08575649078661336 4388417 3274373\n",
      "483 11.220067984706997 237711 8342\n",
      "484 4.029638778252369 1662883 12378355\n",
      "485 5.107122614699821 1603665 167360\n",
      "486 0.04376276935244163 2125909 1724615\n",
      "487 0.418126690419011 1028 538\n",
      "488 0.09414466875528348 471901 347213\n",
      "489 3.0088408213535325 16 2\n",
      "490 11.947517546449546 825126 26023\n",
      "491 0.05527750493889039 8972 7092\n",
      "492 0.0010079798446702322 30 31\n",
      "493 0.4332426157459716 435772 225632\n",
      "494 0.0 41 41\n",
      "495 5.259277435178145 129 1287\n",
      "496 0.9981040832868885 350072 950694\n",
      "497 0.0340687045576133 415623 345572\n",
      "498 1.110221545233905 21927 62892\n",
      "499 5.640177343107142 42 3\n",
      "500 0.38680906566981416 16177 8685\n",
      "501 0.08712031471326238 14979537 20122661\n",
      "502 0.06998261290615537 30022 39114\n",
      "503 4.174150950682456 377 48\n",
      "504 5.579377265604563 61 657\n",
      "505 1.160460236218358 56597024 19273089\n",
      "506 0.6634103943522393 235371 104236\n",
      "507 0.025472598792541566 6683 5697\n",
      "508 7.116073754202339 95463 6626\n",
      "509 17.762799974937618 202 2\n",
      "510 1.3121908310752604 1861350 592024\n",
      "511 1.4607458282544448 42 143\n",
      "512 1.9908842399800437 40 9\n",
      "513 0.8676042632482618 12025147 4737678\n",
      "514 0.056256797176512365 3359634 2650231\n",
      "515 0.22998632629656987 5497167 3403022\n",
      "516 0.033651776041082486 1530813 1274246\n",
      "517 0.13075352997345616 339419 487278\n",
      "518 0.3637886985346458 2791276 1527068\n",
      "519 3.2148584567223906 7524070 45200582\n",
      "520 0.6595345688152938 132641 298798\n",
      "521 2.4486642830345864 219 1051\n",
      "522 2.7283516526233265 1157 221\n",
      "523 0.5301507879626763 5563 11523\n",
      "524 0.009084030374332749 9 10\n",
      "525 0.24349351389393004 391153 640693\n",
      "526 0.07241021315321147 4831 6323\n",
      "527 6.429667865075687 100 7\n",
      "528 0.07412055195055585 67247 88290\n",
      "529 0.7478640030496642 1306403 3102069\n",
      "530 0.15022041937961003 56290 82939\n",
      "531 0.22189531028808177 304742 488102\n",
      "532 0.18711401003571043 29557059 19177847\n",
      "533 0.0010886404123828803 20425126 21110285\n",
      "534 68.57290036565689 607937 153\n",
      "535 7.016457257347011 82475 1166037\n",
      "536 2.50891401822102 330302786 67765592\n",
      "537 0.0 6 6\n",
      "538 0.14026643209646147 379272 260794\n",
      "539 0.08414723131940949 39914207 29863915\n",
      "540 0.07479004106721322 3930585 2990111\n",
      "541 0.4804530139182014 61 30\n",
      "542 0.17349481602906405 239810029 158114582\n",
      "543 0.05016233102157923 79941786 63900629\n",
      "544 0.02432338267728979 1591745 1361887\n",
      "545 0.13522151391821302 12 8\n",
      "546 0.14595063142799233 474 695\n",
      "547 0.31224092488858074 296822 519008\n",
      "548 0.6377043304540609 992137 446436\n",
      "549 2.778835481957111 219042 1160088\n",
      "550 0.1638773292341979 771 514\n",
      "551 6.130495331045457 196 2342\n",
      "552 0.03520948580798519 36296 30086\n",
      "553 1.2939788286558183 5854058 1876865\n",
      "554 0.03188666136536042 250300 299235\n",
      "555 0.0 1 1\n",
      "556 0.21784523231898995 10343328 6485719\n",
      "557 0.03784237410157001 121116 99705\n",
      "558 0.032802222078155946 7419592 6190465\n",
      "559 0.0008995873406329821 267115 275248\n",
      "560 0.045072818044278755 378087 305766\n",
      "561 0.008056351506207564 393 430\n",
      "562 0.09487503219486555 96122 70640\n",
      "563 0.010421004933971911 133905 120910\n",
      "564 0.027333945000091512 5011720 5912738\n",
      "565 0.097112325104147 5747092 4208328\n",
      "566 0.15375773970455386 58354 39425\n",
      "567 0.596146039587341 10036360 4637194\n",
      "568 1.9922349351105872 35764834 8718956\n",
      "569 0.04979304449311734 4 3\n",
      "570 0.056442687129701366 5685635 4483330\n",
      "571 0.1743649397693883 18765187 12359600\n",
      "572 24.58014879471002 2987 20\n",
      "573 0.03805250066448055 201627 245057\n",
      "574 0.05518009301446043 1017426 804424\n",
      "575 0.302921747317647 120319 69391\n",
      "576 0.07460488102067402 351470 461861\n",
      "577 0.002868975173301417 8713223 9192653\n",
      "578 0.26511296413617513 2493834 1490229\n",
      "579 2.2179403791787595 533918 120417\n",
      "580 1.8636056083731298 9026 2304\n",
      "581 44.5918507230828 3177 3\n",
      "582 0.38439123751987914 1335252 718296\n",
      "583 0.16682533920209008 2609280 1734348\n",
      "584 0.3795400978587467 1271176 2353753\n",
      "585 0.6579431423877915 201976 89748\n",
      "586 0.06089544360824756 4752371 3713124\n",
      "587 0.3799741912773037 47676251 25739143\n",
      "588 0.13787788688433686 1604906 1107102\n",
      "589 4.146592779326087 45926380 5993773\n",
      "590 0.0017366136252939192 2106 2020\n",
      "591 0.0013282991303874023 3258245 3379185\n",
      "592 0.13473906425428853 321350 463868\n",
      "593 0.08309403644562621 32845 43819\n",
      "594 0.10882538776108555 5461513 7595932\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "595 8.117534915354609 21 379\n",
      "596 1.8553269585768701 50783 198280\n",
      "597 17.568829055014636 2142058 32394\n",
      "598 0.17303464275268482 1878432 1239196\n",
      "599 2.1725461398914594 1092587 250221\n",
      "600 28.419140396895802 2272 10\n",
      "601 1.7780939155968356 135 515\n",
      "602 0.09391792799313221 2040580 1501964\n",
      "603 0.19617226193279688 1203477 1874100\n",
      "604 0.012848477031280689 3643947 3253451\n",
      "605 0.6456891924184261 203288 454035\n",
      "606 0.010510956051152454 1752929 1582119\n",
      "607 0.6300861393654383 36139229 79930589\n",
      "608 0.3279598592015491 16048073 9051323\n",
      "609 0.008953302785998828 10989171 12079771\n",
      "610 0.9682496737321882 165599 443000\n",
      "611 0.104572490375162 6170 8526\n",
      "612 0.6561360174271592 32315912 72644813\n",
      "613 1.4345441739955909 99343 29989\n",
      "614 0.15491104049523047 366 543\n",
      "615 9.43331175888857 1660 76\n",
      "616 0.06255228644697812 425552 546477\n",
      "617 5.172998420814509 15570 151385\n",
      "618 0.0016664352366117456 23 24\n",
      "619 0.02597422597101009 40101 47114\n",
      "620 0.04357417966309551 7397017 9114086\n",
      "621 3.1399504113795254 9203 54142\n",
      "622 1.9877139818686518 16184 3951\n",
      "623 0.7161477339063173 55925 130357\n",
      "624 2.1783000552164196 34 7\n",
      "625 1.099035996132747 40356 115135\n",
      "626 0.06390949888879886 955808 742300\n",
      "627 0.08667016352361585 12641119 9417380\n",
      "628 2.174270791618537 926858 212142\n",
      "629 1.6241875465601687 8953534 2503305\n",
      "630 0.4219643941591291 435303 833498\n",
      "631 0.06737534811571987 1476346 1138831\n",
      "632 0.6877042484376943 23 54\n",
      "633 0.2934833090573485 16242265 27920406\n",
      "634 0.31316983010658395 19 34\n",
      "635 0.8943047242875778 1881 730\n",
      "636 0.1241627124306839 75642643 53178431\n",
      "637 1.144184255251856 32677906 95236636\n",
      "638 0.4931014767720913 96855 47990\n",
      "639 0.22739514176701964 53478 33195\n",
      "640 0.012460435552481998 1139658 1274246\n",
      "641 2.1946519782503517 86506 19663\n",
      "642 1.760007814993758 359391 1354335\n",
      "643 19.42679405615889 91426 1113\n",
      "644 0.1881716928845403 559118 362336\n",
      "645 0.008496572464038196 15871509 14473925\n",
      "646 0.6569775488690895 599263 1347819\n",
      "647 0.30114853323741747 940021 543012\n",
      "648 0.12145428770715377 59947 84942\n",
      "649 0.12777941216666935 3533279 2471348\n",
      "650 0.4804530139182014 3 1\n",
      "651 6.582917765905267 62330 4790\n",
      "652 1.2526793204263358 15 48\n",
      "653 1.0338960944050688 2385662 6594815\n",
      "654 4.246733387520776 56266 441804\n",
      "655 0.002028983716796658 120826 126393\n",
      "656 4.397269666796791 4729 38508\n",
      "657 0.47644895901557494 344 172\n",
      "658 1.458631308318441 3105458 10390579\n",
      "659 0.14293400161725311 73 107\n",
      "660 2.896768155307857 32 180\n",
      "661 8.730968670547888 7678797 399972\n",
      "662 0.2839666351319085 1499110 879841\n",
      "663 11.989640379039358 956 29\n",
      "664 3.309440659750352 6203965 1006019\n",
      "665 0.06509498240764347 5057546 3918638\n",
      "666 0.0630125503514379 732533 941554\n",
      "667 1.3743931614599358 702154 2267649\n",
      "668 2.297936949638475 79644 362662\n",
      "669 0.008384550930134618 291 319\n",
      "670 17.67813523526404 32221 480\n",
      "671 3.46615648970965 338020 52527\n",
      "672 2.617121975759005 99677 502550\n",
      "673 7.05036326600395 995 69\n",
      "674 0.17180963323482254 6306024 4166207\n",
      "675 0.02953251080575881 75 63\n",
      "676 0.9896539260259927 128727880 47602591\n",
      "677 0.1955486778577604 2120345 1362566\n",
      "678 1.1695086631854694 6811 2309\n",
      "679 0.0059634171189654524 88971779 96114712\n",
      "680 0.12254624385364249 1539315 1084665\n",
      "681 0.1952161809864708 27 17\n",
      "682 0.5808574589340464 6 14\n",
      "683 0.4415384131620028 13915258 7159945\n",
      "684 0.8277691886556634 153 61\n",
      "685 3.3823180146745284 50984477 8104449\n",
      "686 0.03492611065474455 11787585 9778260\n",
      "687 1.5719294447166483 16368 57348\n",
      "688 3.7307749634816707 68 9\n",
      "689 9.770018095860173 48731 1109914\n",
      "690 0.23495766732248005 471936 766296\n",
      "691 0.007918536411495699 645340 705399\n",
      "692 0.268241402692012 710371 423209\n",
      "693 0.048027255984859824 36519756 29332677\n",
      "694 0.7202426428704828 1167867 499827\n",
      "695 0.40650485097138345 69 36\n",
      "696 0.001525938951937163 60714 58388\n",
      "697 0.3930779074450904 19484177 36472625\n",
      "698 0.2913453149886723 20733015 12084978\n",
      "699 0.22120676534614744 3089117 1930075\n",
      "700 0.08912143571830451 98162589 132311278\n",
      "701 0.018921951881198235 7054422 6147820\n",
      "702 2.734153226258475 100740 526409\n",
      "703 0.0025095077880663407 437503 459978\n",
      "704 2.1275577844447118 42 9\n",
      "705 0.3920014798989006 1184980 2216272\n",
      "706 1.8721788170650144 55 219\n",
      "707 0.21226953971304025 101382 160714\n",
      "708 0.3426665194154379 4351072 2423093\n",
      "709 6.995253723129298 606915 8546221\n",
      "710 0.27507341915529876 339693 573936\n",
      "711 0.1945829345124911 2362127 3671786\n",
      "712 0.007454175440676819 650129 708754\n",
      "713 0.08873688392587543 316912 426884\n",
      "714 6.41201217131267 715058 56835\n",
      "715 24.23973185191269 305 42064\n",
      "716 0.006048542577076242 3832 4142\n",
      "717 0.07190779876601781 101892000 77925803\n",
      "718 1.9154207771309677 32903 8244\n",
      "719 2.028617612350156 589 141\n",
      "720 0.008134268638511591 20444829 22374461\n",
      "721 0.0845842305470414 1428609 1068085\n",
      "722 0.0012854537866165388 1129720 1170959\n",
      "723 0.09351809368420867 26097149 19221295\n",
      "724 0.42761982477495886 103 199\n",
      "725 2.876022215082518 349825395 64170926\n",
      "726 0.7445708827173111 24062272 10152905\n",
      "727 0.02770397088421826 11536681 13625959\n",
      "728 0.22442310663506496 9856807 6137571\n",
      "729 0.005421775483386678 115451175 124272958\n",
      "730 3.6519457768423194 384084 2596371\n",
      "731 2.152538413682024 11295740 2604582\n",
      "732 0.24172952700002365 1959639 1198535\n",
      "733 0.5352582568837522 211 101\n",
      "734 0.15961445084198858 3600133 2414405\n",
      "735 63.572428622366246 26119 8\n",
      "736 0.1792131846701237 241283 368451\n",
      "737 0.05328121526848861 503935 400062\n",
      "738 0.8860309185442792 6986378 2725549\n",
      "739 0.04596477200578758 27572 34165\n",
      "740 31.21734757079539 6941 25\n",
      "741 4.168506145766983 359826 2771974\n",
      "742 0.022884518097789047 2451498 2851872\n",
      "743 0.01403629829994107 7039241 7924627\n",
      "744 0.06942476034104902 30630145 39863872\n",
      "745 0.16924649372797945 60417690 40040288\n",
      "746 1.8687971419710872 4460552 17501949\n",
      "747 15.676356451322842 21335 406\n",
      "748 7.699190811368066 929 57\n",
      "749 0.8715501220518163 8577323 3372159\n",
      "750 2.9469489703933456 65911 11841\n",
      "751 0.0564540068433474 711803 561269\n",
      "752 6.529493934494997 102 7\n",
      "753 0.0007189745894831901 1723335 1677740\n",
      "754 2.12563751334805 1821 423\n",
      "755 0.49702133941190646 30115482 14880356\n",
      "756 0.6206832016303462 320 145\n",
      "757 5.567633055074684 2201 207\n",
      "758 0.14230686552160798 478650 328236\n",
      "759 0.10747874709370706 6149461 4430555\n",
      "760 3.2847167567616196 293 47\n",
      "761 0.24252753556646445 645009 394174\n",
      "762 0.04259417705815498 2246298 2761205\n",
      "763 1.7734914229507193 1836865 484971\n",
      "764 0.08952734966782565 1492258 1106364\n",
      "765 1.9957993231912698 27 114\n",
      "766 0.6262230458355552 1699649 3750033\n",
      "767 0.25063646519683286 21545 13059\n",
      "768 20.273259049891475 360 3\n",
      "769 0.5392330670360203 57448 119727\n",
      "770 0.0009617054571329212 6698 6909\n",
      "771 2.5569960344645835 830412 167814\n",
      "772 6.9899194782053105 14959773 1063450\n",
      "773 0.16741293563220566 31445832 20886545\n",
      "774 0.2520963200260869 37 22\n",
      "775 0.002339894280048892 30661039 32180646\n",
      "776 2.262248815493294 8 1\n",
      "777 0.5240861692497897 41205 84988\n",
      "778 0.06303816451151417 4940445 3843492\n",
      "779 0.9321886069957002 14939354 5688802\n",
      "780 1.6562587606427632 1691 6127\n",
      "781 0.0007249840992458585 2919039 2841491\n",
      "782 1.1341488634689751 271 788\n",
      "783 0.27180724084221747 1075762 1811909\n",
      "784 0.3396421125298338 27908194 49984281\n",
      "785 0.13570639375813448 41746132 60339702\n",
      "786 0.10477517963590804 1761287 1274246\n",
      "787 4.88265199425143 90578 825421\n",
      "788 1.440121905876559 8640750 2602411\n",
      "789 0.2756935401829103 2194324 1297979\n",
      "790 0.21968727277284172 2312593 1447244\n",
      "791 0.3002446297403056 326454652 188735246\n",
      "792 0.05963215552331987 46 59\n",
      "793 0.06453439404364898 4357386 3379866\n",
      "794 0.8686872799885177 13372 33962\n",
      "795 0.9431897780048827 129304 341501\n",
      "796 1.7901589050516389 1057400 277438\n",
      "797 0.0010417897694011684 82826 85543\n",
      "798 1.9031867011810306 148 591\n",
      "799 0.2135836169287452 561275 353562\n",
      "800 2.442674882778816 405738 85012\n",
      "801 1.055216900746215 112768 315001\n",
      "802 21.447186438204227 368844 3593\n",
      "803 1.5656073743763432 21999 76882\n",
      "804 1.973252305225561 18179 4461\n",
      "805 0.26228548227071113 338818 565439\n",
      "806 1.3678776839737115 1916702 6172904\n",
      "807 0.023811313540692523 1034817 1207478\n",
      "808 0.0007471674373892994 8921101 9168317\n",
      "809 0.41377083191285596 429 225\n",
      "810 0.965399642359086 2743672 1027110\n",
      "811 0.10762200711008968 728939 525070\n",
      "812 0.1551359090438759 406188 273949\n",
      "813 0.8154990500437884 52718148 21368329\n",
      "814 0.24279063651545318 261024523 159473292\n",
      "815 1.089709142498428 50014 142054\n",
      "816 0.025010831152451826 4694773 4008037\n",
      "817 2.236501266417167 57 12\n",
      "818 0.8976007736894972 80779 208334\n",
      "819 0.00289829719845368 1397456 1324212\n",
      "820 3.5008327174327207 1946 12645\n",
      "821 0.019315367212768265 947397 824468\n",
      "822 0.2352776260016423 3756049 2312463\n",
      "823 0.5248374675538925 593165 287440\n",
      "824 0.0662366512823932 19027002 14709511\n",
      "825 0.04979304449311734 4 3\n",
      "826 8.038641304661317 3856393 65692339\n",
      "827 0.49422041273450684 181579534 89898894\n",
      "828 0.6019984897559608 6373405 2933652\n",
      "829 3.275488666784335 5954492 974641\n",
      "830 0.20629600150481506 59129 37544\n",
      "831 0.6390399869068039 5108 2296\n",
      "832 3.2293584140407425 1345932 8118366\n",
      "833 0.21165857950882047 3161090 1995417\n",
      "834 6.166624243412988 1448329 120891\n",
      "835 0.2583584348388414 48683015 29283976\n",
      "836 15.911993723744294 107 1\n",
      "837 0.22199661687926586 5547410 3463106\n",
      "838 0.7338224754086992 92921096 39453245\n",
      "839 0.012402182860704651 39930670 44634620\n",
      "840 0.701319687100436 224181 97028\n",
      "841 0.7152605871113854 32752 76303\n",
      "842 7.480608247127399 2892178 44570858\n",
      "843 2.1586015100175997 81340 18716\n",
      "844 24.753212901423847 280 40684\n",
      "845 1.365793544017437 173427 558041\n",
      "846 0.6647425593085504 162589 367435\n",
      "847 39.721041014668025 93 51315\n",
      "848 0.8808256632375756 4840160 1893495\n",
      "849 0.10511073418912878 8718711 6304499\n",
      "850 0.01825885677281982 6872175 6003576\n",
      "851 1.9765002123782818 343300 1400364\n",
      "852 4.113385312994768 14 113\n",
      "853 0.6202331819146344 6324167 13900534\n",
      "854 1.2229474381519945 598673 198114\n",
      "855 2.6224652874680956 3031760 600339\n",
      "856 0.1276160263012964 467110 326794\n",
      "857 1.5992325415105002 115749771 32681953\n",
      "858 3.370551271642051 179338 28598\n",
      "859 0.07235138906631401 4171514 3187691\n",
      "860 0.3168087879919503 177574 101142\n",
      "861 0.8817084276756286 32526680 83183915\n",
      "862 0.0 1 1\n",
      "863 9.448554147208872 7 172\n",
      "864 0.01767259611983012 128808860 147122845\n",
      "865 0.008623311774763873 75054774 68398875\n",
      "866 0.4987770357275864 49734 24543\n",
      "867 0.7014949200772456 149388 345193\n",
      "868 0.2650159612729489 1173706 701432\n",
      "869 0.015951424967471393 6782298 7695340\n",
      "870 0.1559270701154646 48408026 32615564\n",
      "871 0.0012136760388964706 29426220 28418725\n",
      "872 0.06767642951220391 94873 73141\n",
      "873 0.33365208231802573 5505037 3089586\n",
      "874 3.381122109921341 7819314 1243355\n",
      "875 0.07853469680984597 1593634 2109092\n",
      "876 0.0027026138585059894 9679994 10196535\n",
      "877 0.34872364665655925 244572 441439\n",
      "878 0.0917205117222078 80093172 59165385\n",
      "879 0.5957272173499685 289121 625581\n",
      "880 0.2959313804636901 62585 107827\n",
      "881 0.010160991484607012 10975323 9922924\n",
      "882 18.718196705223814 118431 1564\n",
      "883 0.3343997151980669 15147 8495\n",
      "884 0.044959743026416664 50737 62721\n",
      "885 1.5582429776887405 3013671 864903\n",
      "886 0.34997518657112175 1300350 719676\n",
      "887 2.7849430808867446 252912 1341924\n",
      "888 1.9376201218794014 1191225 4792092\n",
      "889 0.2393557223420779 1495196 2438779\n",
      "890 0.47071644170184546 534979 269384\n",
      "891 0.8803499147053079 22 8\n",
      "892 0.03571231130871164 203289 245576\n",
      "893 0.030084246815416986 1934735 2301172\n",
      "894 6.1893427455706025 417345 34676\n",
      "895 2.273766913239054 130 28\n",
      "896 4.895116314600897 58005444 6347403\n",
      "897 0.0003608855174129351 5632 5526\n",
      "898 0.2339012321372602 5533300 3411496\n",
      "899 0.08632484900698016 1756116 2355883\n",
      "900 0.9565771275242225 134 358\n",
      "901 0.016584225638808365 3620237 4117801\n",
      "902 0.07196556650367034 16 12\n",
      "903 0.6310477027334598 53899 119284\n",
      "904 1.794021224778228 499 130\n",
      "905 1.8848308175095003 154151 39057\n",
      "906 21.398573404805955 20315 198\n",
      "907 7.918766361004208 60540 1009641\n",
      "908 2.112989368969579 726325 3107616\n",
      "909 0.18619129665732218 2169618 1409242\n",
      "910 4.777222267146754 137863 15495\n",
      "911 0.25179944344227495 146 88\n",
      "912 0.002680869198726177 30350661 31963525\n",
      "913 8.43698258470049 3103 169\n",
      "914 0.28257715436807 148338343 87174852\n",
      "915 0.038538842551879525 698279 573814\n",
      "916 4.307814994195063 509 63\n",
      "917 0.3214957216162421 1257452 713253\n",
      "918 0.0017733771983939482 96 92\n",
      "919 0.014737038908125085 3519100 3973318\n",
      "920 29.43194344433359 1055503 4648\n",
      "921 1.4969503576774825 2171 638\n",
      "922 0.044021895032170424 636921 516374\n",
      "923 0.48091386074073844 5627515 2812822\n",
      "924 13.20130985888424 6125047 161867\n",
      "925 1.9999750401380878 563118 2316228\n",
      "926 0.3658431741874917 2842188 1552279\n",
      "927 0.6086356857007665 228033 497524\n",
      "928 2.9822351674099252 6245 35122\n",
      "929 0.06080919875357642 905 707\n",
      "930 0.5163463034649308 2934056 6019224\n",
      "931 0.19607543088346321 2112279 1356575\n",
      "932 0.06701611516606715 1465277 1131076\n",
      "933 2.309870321744774 31 6\n",
      "934 0.00014735110617002868 2484687 2515032\n",
      "935 0.2812752449345111 2550 1500\n",
      "936 0.0012255471158654535 343631658 331809997\n",
      "937 2.6114966540724236 25589 128793\n",
      "938 6.145126085764255 41 500\n",
      "939 8.156947582053055 19765561 1136443\n",
      "940 2.864248891547982 10910321 59270902\n",
      "941 0.8541710065178316 4773883 1894483\n",
      "942 0.7101757990293958 1597 687\n",
      "943 0.005297560909571938 701 754\n",
      "944 0.3929063894572898 159100 297781\n",
      "945 0.11506730643662735 218 155\n",
      "946 0.2609428178959135 239 143\n",
      "947 1.8444993982084121 143 559\n",
      "948 0.02017446112709261 29985178 34561489\n",
      "949 0.01658444658244464 403126 458532\n",
      "950 2.322310019464516e-08 1712826 1712565\n",
      "951 2.695480864229548 699251 3611227\n",
      "952 0.345493163436756 9 17\n",
      "953 0.2676842172774651 498631 836519\n",
      "954 1.1606250059945529 212334 623586\n",
      "955 0.9472780965373846 95788 36192\n",
      "956 1.1505497095729607 113 38\n",
      "957 0.15382390950333558 18539 12524\n",
      "958 2.306477206023339 1789 391\n",
      "959 0.04046375314648812 685566 838321\n",
      "960 2.812512301225864 222 1192\n",
      "961 0.11385344446617798 3776736 2695108\n",
      "962 6.604378463691962 72716 5565\n",
      "963 0.4267058571016144 693512 360878\n",
      "964 1.0054017543983342 1201 440\n",
      "965 0.006966402832620697 3071932 3339335\n",
      "966 2.7180752637236743 129 24\n",
      "967 4.802520832717177 570804 5107743\n",
      "968 0.1911990300653758 2970946 1918641\n",
      "969 0.22688637306218473 16165080 10039501\n",
      "970 2.372970924178664 185 867\n",
      "971 3.207108387973524 287658 1724367\n",
      "972 9.979734502532578 33276 783630\n",
      "973 0.16575230365919105 1777883 1183287\n",
      "974 0.019475369194949098 6155249 7077066\n",
      "975 0.0005943026930526411 23972065 23394732\n",
      "976 1.7223092289918875 1483 5512\n",
      "977 6.493330885419276 1124 87\n",
      "978 0.422705479011254 1027077 536096\n",
      "979 69.6881989813914 8442 1\n",
      "980 13.981758186373789 38110 905\n",
      "981 0.09054289849604762 1824473 1350382\n",
      "982 0.06274576075634146 5016044 3904581\n",
      "983 3.0556747054225903 986416 5665372\n",
      "984 0.0003841485723951926 2243920 2200368\n",
      "985 1.4326231530202826 3218538 10653088\n",
      "986 0.2334928638774597 40268 65286\n",
      "987 0.8532800740988373 446266 1123999\n",
      "988 0.6898585077517481 223 513\n",
      "989 0.001903088073656639 576609 602320\n",
      "990 0.5627994168193559 2460071 5209011\n",
      "991 11.075359148029204 2118 75\n",
      "992 0.11284945285362727 1322238 944967\n",
      "993 0.06976125406932837 3721979 2858028\n",
      "994 0.09798564020880413 1185834 1621697\n",
      "995 1.2116509002135774 12732833 4235213\n",
      "996 0.4948794326667566 598851 296348\n",
      "997 0.5805570551072764 192771 89977\n",
      "998 0.9620260235635408 39 14\n",
      "999 0.048664538759536406 2580326 2069517\n",
      "1000 0.0035382641702407635 17017 18060\n",
      "1001 0.0716021736737806 971379 1269404\n",
      "1002 4.581017649244713 1721171 14633839\n",
      "1003 1.5097391834967249 5979903 1750129\n",
      "1004 0.0005377503556471759 1744 1704\n",
      "1005 1.7458535932656312 17558753 4684433\n",
      "1006 0.048915818784651864 16901085 13547589\n",
      "1007 2.1066557433899122 10610116 2485255\n",
      "1008 10.188571406156198 3114 127\n",
      "1009 0.2785427835801305 78605 133248\n",
      "1010 0.09010273636078374 1000065 1350178\n",
      "1011 1.4848383590909406 810 2742\n",
      "1012 7.57994855303981 203 12\n",
      "1013 2.6581671709869097 82269 420058\n",
      "1014 1.381961639460416 1583845 5131636\n",
      "1015 0.539127865203279 3800 7920\n",
      "1016 3.933025889616252 819 5957\n",
      "1017 9.384830086423404 31545 1473\n",
      "1018 1.220143381828785 13262 40027\n",
      "1019 1.304199085550591 18154 56880\n",
      "1020 1.2330258003894732 350607 1064313\n",
      "1021 3.2308768726108865 8029284 1330601\n",
      "1022 8.116974321206182 106 1847\n",
      "1023 0.17622052260204066 9888590 6498655\n",
      "1024 2.126771174463055 1350368 314123\n",
      "1025 6.105023239973339 3607 42690\n",
      "1026 6.731297757383938e-05 1772757 1758272\n",
      "1027 0.3386326850585917 18 33\n",
      "1028 0.6675414901319657 2749974 1214775\n",
      "1029 10.472457323628241 584 22\n",
      "1030 0.003889178777048675 1089885 1160018\n",
      "1031 5.242488269749254 32792 3321\n",
      "1032 2.134217947746297 1010445 4354840\n",
      "1033 12.90838150600268 41273 1499753\n",
      "1034 0.04511648330382197 8726593 7056637\n",
      "1035 0.8907392994019154 42688 109697\n",
      "1036 0.11562784435459836 1402950 998537\n",
      "1037 2.136173799940417 1365268 316568\n",
      "1038 0.8138989290854975 320610 130068\n",
      "1039 0.031014457183175855 8028853 6732405\n",
      "1040 3.3792714644679847 6 43\n",
      "1041 0.000400953646147524 11046023 11269436\n",
      "1042 0.23686145047846305 84829 138010\n",
      "1043 0.21993226844212196 3112690 1947444\n",
      "1044 0.2522049994406697 1033425 625426\n",
      "1045 0.20029597915430983 1050192 671278\n",
      "1046 2.8554757524030565 33803245 6238510\n",
      "1047 0.07367075083002558 5782707 4408120\n",
      "1048 0.1109331605479401 996890 1390901\n",
      "1049 5.237684700886886 1064 107\n",
      "1050 2.7899866355783933 11305159 2127470\n",
      "1051 6.719566186356573 78378 5866\n",
      "1052 5.068314762472869 94 9\n",
      "1053 0.02996265550414143 224063 266407\n",
      "1054 0.0006195868485055106 17830705 18280108\n",
      "1055 0.005949710774188684 13607585 12597432\n",
      "1056 2.149458452032469 1065780 246006\n",
      "1057 0.1058487614745531 1286561 1781253\n",
      "1058 0.17764674876092298 5025149 7659424\n",
      "1059 0.023350791507061493 1598 1862\n",
      "1060 4.8623818563550625 519964 4716594\n",
      "1061 9.877600682151089 2696449 116375\n",
      "1062 3.8165054593287504 55698 392897\n",
      "1063 0.04902596365432101 14867688 11914691\n",
      "1064 0.020794681135235433 5035991 4359715\n",
      "1065 6.0146442689293815 3670 315\n",
      "1066 7.602640031903479e-07 628763 628215\n",
      "1067 0.0010567266126681623 4789797 4636597\n",
      "1068 15.11653384435013 3551396 72754\n",
      "1069 0.08256416286495064 21421 16071\n",
      "1070 2.1430106562222386 30179 130461\n",
      "1071 2.171677731532606 114926 501677\n",
      "1072 2.4208199230200003 3893 18454\n",
      "1073 0.0053610814231468745 2947433 2739335\n",
      "1074 0.03937748610669683 60032 73209\n",
      "1075 0.2037865563300217 162 255\n",
      "1076 0.00048281704478100076 22051861 21572598\n",
      "1077 0.009057356914932427 440187 484138\n",
      "1078 0.5240961944997731 76792 37231\n",
      "1079 3.018764946308668 7461463 42402654\n",
      "1080 0.11209778987662809 229630 164294\n",
      "1081 0.2686971471554878 72044020 42901949\n",
      "1082 0.18526579671195148 46179823 30027626\n",
      "1083 0.16657925443113925 6281063 9446842\n",
      "1084 0.10248419118769996 303927 220667\n",
      "1085 0.36695464820812734 1879704 1025670\n",
      "1086 0.35059464077983554 358 648\n",
      "1087 0.20605254425682587 2823073 4444895\n",
      "1088 4.324077125263812 15 1\n",
      "1089 1.0678331412164486 12864 36156\n",
      "1090 0.4804530139182014 3 1\n",
      "1091 2.7940510494558244 18575280 3491355\n",
      "1092 0.03371864591602527 4690181 5635566\n",
      "1093 11.162905492367072 3 112\n",
      "1094 1.6655048064765297 2317768 8424544\n",
      "1095 0.0043297704383939714 32958434 35200074\n",
      "1096 0.006643208967607076 12481 11504\n",
      "1097 0.7922917527128569 229542 94252\n",
      "1098 10.326888439585126 40029 995431\n",
      "1099 0.23739356891605862 29846 48584\n",
      "1100 4.884559245777041 16088 146677\n",
      "1101 0.3149430642988186 452025 792297\n",
      "1102 1.6019916102515641 1273361 359141\n",
      "1103 0.2639716353800096 2459589 1471397\n",
      "1104 0.5056060491683838 6464001 13161664\n",
      "1105 12.787687790239453 53353018 1493231\n",
      "1106 4.1067397490472555 94261 715217\n",
      "1107 1.9230567796806246 13369 3340\n",
      "1108 6.44969314512581e-07 11447755 11438565\n",
      "1109 0.9668817261482628 1079 403\n",
      "1110 1.39715132205264 4923 1509\n",
      "1111 0.00457769271330325 99 106\n",
      "1112 0.024861159406159378 894283 763832\n",
      "1113 0.3573208648376789 1691636 930468\n",
      "1114 0.18354679140763486 2023045 1318085\n",
      "1115 0.2646696620813178 693798 414768\n",
      "1116 0.21921525433410224 1622132 1015658\n",
      "1117 0.8973270143246012 888612 2291438\n",
      "1118 0.12267629249696413 6136360 4323131\n",
      "1119 3.5644156369395645 205813 31155\n",
      "1120 0.7154306930279176 568738 244102\n",
      "1121 0.41566312734242045 9330 4896\n",
      "1122 0.06604686827482201 900184 1163974\n",
      "1123 0.10427374369121177 194796 141039\n",
      "1124 0.02328053055072655 181 211\n",
      "1125 1.8981582373987205 135015 535461\n",
      "1126 0.01195734710438691 26861653 29965580\n",
      "1127 0.18313077321277418 2359432 1538000\n",
      "1128 0.28762028479910506 411980 704352\n",
      "1129 0.7588680618731447 153 367\n",
      "1130 0.006945466679132665 3306001 3593328\n",
      "1131 20.397896064764026 913685 9984\n",
      "1132 0.5680962150498109 423078 199105\n",
      "1133 1.4224822701836983 414178 125664\n",
      "1134 0.1926338070376854 15558142 24130698\n",
      "1135 0.2842233022973862 11377809 19390648\n",
      "1136 0.13769306407755771 16845498 11623318\n",
      "1137 20.49561811100194 184 1\n",
      "1138 5.301898110478399 10 109\n",
      "1139 0.8520026963871765 301346 119727\n",
      "1140 0.7916739518976731 142797776 58654767\n",
      "1141 0.04910512430660148 1293 1614\n",
      "1142 0.04551867898892872 429318 346834\n",
      "1143 0.05483058650852891 1519572 1920505\n",
      "1144 0.5795548294936073 500 233\n",
      "1145 0.1856473768771136 2394857 1556524\n",
      "1146 0.03244045494479732 2386714 1993327\n",
      "1147 0.1880346854215883 1030547 667951\n",
      "1148 0.07521075533874746 866861 658940\n",
      "1149 0.7853022898937997 3662 1509\n",
      "1150 1.2973888477502178 3174834 9917346\n",
      "1151 2.6779442481655242 87734201 17079334\n",
      "1152 0.055951320236852155 287301 226782\n",
      "1153 0.6096144477209163 1780298 815464\n",
      "1154 1.1181835135282518 130034732 45166678\n",
      "1155 5.009409051273836 108575 11579\n",
      "1156 0.18074085640620974 1088584 711586\n",
      "1157 0.8657950331678174 132846730 52389960\n",
      "1158 0.07388453880879078 254572 193982\n",
      "1159 0.9767713282291187 138698 372643\n",
      "1160 9.202665863896812 9107126 438439\n",
      "1161 2.6034023006606075 107937695 21499892\n",
      "1162 0.042059060527796045 17020057 13864192\n",
      "1163 1.5873511445612913 5079556 17905810\n",
      "1164 2.740410446751475 1030362 5394228\n",
      "1165 0.001793395312057057 2565 2676\n",
      "1166 6.404768887806037 205 2587\n",
      "1167 2.4357184790176407 3061 642\n",
      "1168 0.07659169052976708 8082732 10659825\n",
      "1169 0.28515670382949615 29551126 17324491\n",
      "1170 23.915514595899268 265 1\n",
      "1171 0.16440195389316548 2 1\n",
      "1172 2.7947206317251574 297 55\n",
      "1173 7.732336128578186 22 370\n",
      "1174 0.02715767875612989 302144 356274\n",
      "1175 0.00029412297798982006 159787 162551\n",
      "1176 0.0854894977118324 100088728 134080846\n",
      "1177 0.584495088585469 2049488 954151\n",
      "1178 0.227573733603657 3618 2245\n",
      "1179 0.013625002370572691 1509940 1343588\n",
      "1180 6.833614911283262 395 28\n",
      "1181 0.10154312346916047 16560082 12041232\n",
      "1182 0.10755739771076142 2633016 1896804\n",
      "1183 2.1789019742465072 5611816 24556705\n",
      "1184 0.5456457182661802 1366949 2861254\n",
      "1185 2.179891190786015 52 231\n",
      "1186 0.18285276212984206 3416338 2227670\n",
      "1187 0.06318828532991878 4694070 3650730\n",
      "1188 0.0424980072000266 16689643 13580540\n",
      "1189 0.1546696080687059 84671 57139\n",
      "1190 0.3365637354130844 409803 732028\n",
      "1191 0.791336689648183 581945 239081\n",
      "1192 0.1370359895789042 74806445 51661887\n",
      "1193 0.05871993563764557 29112000 22847198\n",
      "1194 0.27080290430153836 30725 51701\n",
      "1195 1.8153705225184371 1976 7605\n",
      "1196 0.0032698709726627223 2411683 2553609\n",
      "1197 0.0011339607217431948 7754834 7498043\n",
      "1198 0.11045522002127377 39270383 28166197\n",
      "1199 0.05819530642384476 1973867 2512383\n",
      "1200 0.04785241594205104 37562 46747\n",
      "1201 1.5694150552333266 6 1\n",
      "1202 0.08276097481015168 2 3\n",
      "1203 6.508292953120012 470 6038\n",
      "1204 0.0875012274472689 4471095 3326189\n",
      "1205 0.3295021578786703 13492 7599\n",
      "1206 0.21556840657488271 2803659 1762321\n",
      "1207 0.06309110196473562 31948703 24852346\n",
      "1208 1.8366798967680236 1077 277\n",
      "1209 4.499979934840389 37 316\n",
      "1210 0.017702612628463548 5621345 6421310\n",
      "1211 0.2823068127675464 6058847 3561543\n",
      "1212 0.015925977627144636 981621 1113656\n",
      "1213 0.33012785967964814 6552 3688\n",
      "1214 0.21130250079803553 2099443 3324598\n",
      "1215 0.22141695478594786 5321952 3324404\n",
      "1216 0.2224653095856252 2918093 4676700\n",
      "1217 0.08608416306413819 4507332 3361221\n",
      "1218 39.39183517749025 823777 1548\n",
      "1219 2.2467406959388154 87470 391591\n",
      "1220 14.091553126259171 2700915 63273\n",
      "1221 0.2941156364192314 1117 649\n",
      "1222 0.4276198247749587 24 12\n",
      "1223 0.6392443577663924 195 435\n",
      "1224 0.10593074602473646 2108895 1523018\n",
      "1225 0.403941516195306 15696758 8313579\n",
      "1226 0.7026473936043586 142589 61665\n",
      "1227 12.416102995886332 314 10680\n",
      "1228 13.938658592966352 395324 16533757\n",
      "1229 4.1526435088439 42011 322388\n",
      "1230 0.30168899899575563 7645954 13242600\n",
      "1231 38.40787083132848 9337 18\n",
      "1232 0.2810505361128647 356108 605090\n",
      "1233 0.047977140769538316 1329678 1655286\n",
      "1234 0.9312925704361008 2702281 1029487\n",
      "1235 0.0309616099487869 35057028 29400656\n",
      "1236 21.714622209519803 844 7\n",
      "1237 0.0765355465548345 4103531 3111787\n",
      "1238 0.004306842955261551 14951600 15965735\n",
      "1239 0.6937376070685242 22 9\n",
      "1240 0.05058187268448061 4795368 3829546\n",
      "1241 0.20710503484231302 2585023 1639918\n",
      "1242 0.06026921891240827 380233 486036\n",
      "1243 5.2613333421857895 2368 238\n",
      "1244 0.3510128919786342 1753 3171\n",
      "1245 0.6980491139088177 228568 527068\n",
      "1246 3.291438929365595 134 21\n",
      "1247 1.7281982900691852 38666129 10384916\n",
      "1248 0.012371141905672777 246 220\n",
      "1249 0.2904737707294284 85043 49610\n",
      "1250 0.12334512055045051 6066418 4269783\n",
      "1251 0.13522151391821302 259 179\n",
      "1252 0.1668591940672022 1105375 734695\n",
      "1253 0.2540380196805375 662420 400165\n",
      "1254 0.010469405560215318 9137587 8248870\n",
      "1255 0.32417613007847246 16550 29247\n",
      "1256 0.02331512164644936 2276349 2651872\n",
      "1257 2.141497041972535 782 3382\n",
      "1258 0.09083114059943827 6397403 4732767\n",
      "1259 0.8395887053184746 4 1\n",
      "1260 1.724986962385044 31 118\n",
      "1261 0.24542410055688182 4179502 2546676\n",
      "1262 0.027304486337717055 2671669 2264746\n",
      "1263 1.8417921656358454 10135 2608\n",
      "1264 0.5998599199728437 37881 17460\n",
      "1265 0.5895530070335742 143469 309187\n",
      "1266 4.876355774960514 5252 47800\n",
      "1267 1.9218120556728056 2 11\n",
      "1268 19.892888193180863 172 1\n",
      "1269 1.290883337603407 1211725 3774304\n",
      "1270 3.924366819651561 28 3\n",
      "1271 0.5824301290694716 224315 481172\n",
      "1272 0.030809554027367923 5371515 6402167\n",
      "1273 0.0007958721130449742 1709566 1662011\n",
      "1274 0.02179165430889301 1600380 1380742\n",
      "1275 0.10090473065379553 3255786 2369736\n",
      "1276 10.769833457204651 5776 216\n",
      "1277 0.2823376363813578 37877 22264\n",
      "1278 0.01197623296194717 710376 636738\n",
      "1279 0.018493995629432214 3508696 4019820\n",
      "1280 2.9919873819955116 13675462 2425085\n",
      "1281 0.008639140282854006 4288167 3907557\n",
      "1282 10.585909082166038 204805967 7912656\n",
      "1283 0.06265329077474241 1037139 807477\n",
      "1284 0.9272491775674558 4150897 1584686\n",
      "1285 0.4228176290197973 4598926 2400263\n",
      "1286 0.04239592966751013 1553105 1264091\n",
      "1287 4.015221877019798 17590721 130473932\n",
      "1288 34.79888894528557 107219 293\n",
      "1289 9.797491689593949 3729565 163032\n",
      "1290 0.2461066793019512 14693326 24130698\n",
      "1291 0.30129484459372363 7677236 13292008\n",
      "1292 0.0700225634138406 15320001 19960933\n",
      "1293 0.4180819544872234 23401 44674\n",
      "1294 0.23530467543268346 585991 360763\n",
      "1295 2.0235512668164883 19912 82589\n",
      "1296 0.01597037265223508 3313019 3759304\n",
      "1297 0.04013418485990481 12194 14899\n",
      "1298 0.3035157253859725 3285335 1893725\n",
      "1299 0.06485712796991655 833929 646438\n",
      "1300 0.029444701117799663 3911962 3295126\n",
      "1301 0.015635978106590384 27627 31307\n",
      "1302 1.1164490716592095 2609896 907273\n",
      "1303 0.7982966667060192 57196188 23406476\n",
      "1304 0.11915342877419781 6109921 4326365\n",
      "1305 0.011294759825946292 1736049 1561013\n",
      "1306 0.15134762792269127 1805860 1223851\n",
      "1307 0.42194158087707356 478423 916046\n",
      "1308 0.007948206431553846 20398004 18658174\n",
      "1309 0.4804530139182014 5 2\n",
      "1310 0.05418303855261587 5647808 4474949\n",
      "1311 0.19818950276522235 2425335 1553926\n",
      "1312 0.2601170610621922 98197 163530\n",
      "1313 1.1847505829436709 1124262 3338728\n",
      "1314 0.2640551344639498 209272 125182\n",
      "1315 4.7983878877391835 233087 2083778\n",
      "1316 0.3177767047517485 326821552 185990843\n",
      "1317 0.646431715677301 323 144\n",
      "1318 6.933148006000346 333 23\n",
      "1319 13.669127063505833 362 8\n",
      "1320 0.06079084536941004 641685 821109\n",
      "1321 3.1495758314450883 6452 1093\n",
      "1322 0.004704740663970445 23818 22239\n",
      "1323 2.6976667605771807 1759098 9090766\n",
      "1324 0.08080932383611353 1706835 2268028\n",
      "1325 0.04320829535367583 43439 35286\n",
      "1326 2.2769180110473104 29726478 6573800\n",
      "1327 0.4614278421325859 715886 362939\n",
      "1328 0.10796236718105989 1706720 2370616\n",
      "1329 0.0009428824918005579 454368 440628\n",
      "1330 3.242196364416549e-07 108916 108854\n",
      "1331 0.9116234992023654 52081 20045\n",
      "1332 4.341795324992682 82162224 10226660\n",
      "1333 0.2250880008449478 1688859 1050870\n",
      "1334 0.36262656729197545 94352674 51668834\n",
      "1335 0.18257964226131404 261572924 170616734\n",
      "1336 0.5456122211504486 3371 7057\n",
      "1337 0.1852958292899816 6039769 9288953\n",
      "1338 0.004298961169513175 10842367 11577086\n",
      "1339 1.5968476003315193 11113 3140\n",
      "1340 0.3725775159427201 49562 91252\n",
      "1341 0.059455256603998835 28790825 22560989\n",
      "1342 1.389227600401126 12 3\n",
      "1343 0.12689427587365373 16697256 23842427\n",
      "1344 0.4533598003303993 1932081 3788302\n",
      "1345 9.568240684953198 204 4519\n",
      "1346 0.01595010505628773 51 58\n",
      "1347 10.135244094199262 47638 1973\n",
      "1348 0.1529790465976616 140872433 95271374\n",
      "1349 24.206212310512576 958 6\n",
      "1350 0.23856299296303793 98836 161079\n",
      "1351 13.176653942433004 155 5882\n",
      "1352 0.25621279790407564 7371639 4443602\n",
      "1353 0.5717580179749248 3028619 1421851\n",
      "1354 0.1940466751438584 750101 482847\n",
      "1355 0.6185827375528014 6852285 3120776\n",
      "1356 0.15719781329966914 316221 470090\n",
      "1357 0.025822906861371227 3389300 2886164\n",
      "1358 1.2842432496543579 6844 2203\n",
      "1359 0.9710100594512173 804386 300269\n",
      "1360 0.2677361430281307 9114117 15290884\n",
      "1361 8.625333289434401 6 131\n",
      "1362 5.054490535696179 874 8286\n",
      "1363 10.642998882010376 469 17\n",
      "1364 0.3942538712011371 171717 321741\n",
      "1365 1.079815545563836 2447 865\n",
      "1366 1.2159864930909297 886730 294365\n",
      "1367 0.8163185546185583 19250 47515\n",
      "1368 0.32487919701826723 1055110 596702\n",
      "1369 1.0656865520831484 1413770 503555\n",
      "1370 0.006368749406202093 2641869 2861343\n",
      "1371 0.06403051173000472 1860250 2395887\n",
      "1372 0.514958238902668 204990 100018\n",
      "1373 0.16384258911648478 482 723\n",
      "1374 2.642836847354893 17505559 88961079\n",
      "1375 2.1697944914825746 29816 6834\n",
      "1376 0.4236068634150897 12660 6603\n",
      "1377 0.05877671591354995 57629 73440\n",
      "1378 65.38576528860725 22743 6\n",
      "1379 0.04358347274474236 18628675 15118747\n",
      "1380 0.7707292007871981 10799050 4488624\n",
      "1381 1.6932844177051611 267643 983305\n",
      "1382 0.08700687961578138 2019 1503\n",
      "1383 3.3059975480151333 11790184 72639411\n",
      "1384 0.20072519348621534 1463219 934835\n",
      "1385 39.54912904624157 850848 1579\n",
      "1386 0.7377210604523757 196361 463527\n",
      "1387 21.11802703845278 6337 63\n",
      "1388 0.005355411253612226 16405276 15247603\n",
      "1389 0.2916957115721299 603222 351495\n",
      "1390 0.28002362191218055 26021179 15328873\n",
      "1391 0.3473607629121538 25245254 14002907\n",
      "1392 0.1266448212865597 230107 161204\n",
      "1393 1.8767627593393401 1152 292\n",
      "1394 0.0007701119920322313 27187286 26443187\n",
      "1395 0.38957714450164477 77931 41748\n",
      "1396 0.3480562756762862 101 183\n",
      "1397 0.006430642655433761 190707 176011\n",
      "1398 0.11456445730655766 394098 552843\n",
      "1399 14.765214251555506 30038 643\n",
      "1400 13.97017417543854 125 2\n",
      "1401 0.5165066186642734 113934 233763\n",
      "1402 1.2609093860212839 7713 23710\n",
      "1403 0.00026083182891911656 76528 77774\n",
      "1404 1.1318798998706598 78608 227780\n",
      "1405 0.0406279122627416 3116952 2547957\n",
      "1406 1.5089837846613576 41643238 12191414\n",
      "1407 1.0775090026976029 2578392 913140\n",
      "1408 0.7758040938031252 203856 84488\n",
      "1409 0.15851886130397155 5978061 4014657\n",
      "1410 32.666360832175904 44313 145\n",
      "1411 6.146532793164426 4113350 344733\n",
      "1412 0.20818000410495516 3150907 1996554\n",
      "1413 1.4485473874800738 106378 31926\n",
      "1414 0.11406054605676458 5999279 4279819\n",
      "1415 0.4966864588527648 126022 62283\n",
      "1416 1.7557720816968532 24514198 6515584\n",
      "1417 0.40973364917787664 20184378 10641958\n",
      "1418 0.0745373212148243 183415 240993\n",
      "1419 0.20015906329948888 470374 735772\n",
      "1420 1.1218024502244555 26962874 77758873\n",
      "1421 1.532577454119947 54569 188190\n",
      "1422 5.276273536454793 178 17\n",
      "1423 8.051844214569735 4781226 280023\n",
      "1424 1.34672088655279 6503423 2037748\n",
      "1425 0.008310368958789123 2259223 2474856\n",
      "1426 0.6160069720749846 2480626 1131619\n",
      "1427 1.326363156935496 150591 47602\n",
      "1428 0.03135745473504759 44603 53244\n",
      "1429 15.278847288562474 4441 221388\n",
      "1430 0.45205859620414085 25556 13046\n",
      "1431 0.38498964031566285 5836306 10854432\n",
      "1432 1.733966550032667 257877 69108\n",
      "1433 0.2962143970997995 2031150 1178622\n",
      "1434 9.324706690918566 25 550\n",
      "1435 0.88883179390627 189993 74010\n",
      "1436 0.619464070827625 102696 225617\n",
      "1437 9.460341388179444e-05 849917 858224\n",
      "1438 4.67639024674974 1329 152\n",
      "1439 2.097195667790885 422 1799\n",
      "1440 2.0585229060483 11541 2748\n",
      "1441 0.12258426200444242 412661 290762\n",
      "1442 0.17029080838871724 898339 594597\n",
      "1443 4.376540611513245 845129 6846608\n",
      "1444 5.047930839738349 70528 666995\n",
      "1445 0.05678418898869475 41627983 32801632\n",
      "1446 2.861767725079994 37 6\n",
      "1447 0.013167371568709153 21429217 24034840\n",
      "1448 72.90132974362557 71487 13\n",
      "1449 0.032839283472700466 52297540 43629486\n",
      "1450 0.2785986772909316 1612559 951227\n",
      "1451 0.3214352257000263 170 96\n",
      "1452 0.023762432091205918 5 6\n",
      "1453 6.082309950293158 8247 97144\n",
      "1454 0.012005242738218628 1254162 1124006\n",
      "1455 0.4776256637597804 679839 340614\n",
      "1456 0.40436479824578947 322541 609189\n",
      "1457 1.7665117454106962 280293 74197\n",
      "1458 0.463554039385811 6413744 12670688\n",
      "1459 0.4597216894202835 3478 1765\n",
      "1460 0.09384066893235625 5692135 4190211\n",
      "1461 0.003988693163708461 20725294 19456842\n",
      "1462 10.55707521535847 11181632 433920\n",
      "1463 0.18753220467987197 188162 122028\n",
      "1464 0.18089815774923543 19112145 29243147\n",
      "1465 0.020738187369221527 2836767 2456303\n",
      "1466 0.24738042146107533 2937411 1786315\n",
      "1467 1.1723590108166275 6593218 2232866\n",
      "1468 0.021039634605347798 16626 14381\n",
      "1469 0.12589820836226792 461 323\n",
      "1470 0.29306051111336684 952029 554044\n",
      "1471 0.7760615021956558 1274397 528099\n",
      "1472 0.0020801936444733112 709772 678127\n",
      "1473 0.19423226881117422 1421619 914917\n",
      "1474 9.390496835096984 2998 139\n",
      "1475 14.315617988861906 999 43973\n",
      "1476 0.05913114828013728 28654912 22469434\n",
      "1477 0.1654816412595367 28618823 19053871\n",
      "1478 0.029354186814390493 71066 59876\n",
      "1479 0.09883527902698161 1644685 1201016\n",
      "1480 0.00012262677100016123 3720189 3679220\n",
      "1481 0.29571927886092536 809764 470098\n",
      "1482 0.01825307519527527 836993 958069\n",
      "1483 1.1932695166782359 436262 1300642\n",
      "1484 0.08644748873413935 1330868 991845\n",
      "1485 6.405054724425887 237430 18897\n",
      "1486 0.11077033881895831 572033 410089\n",
      "1487 0.18827177410449178 1158736 1788242\n",
      "1488 0.076632920401474 9001170 6824561\n",
      "1489 0.013224500404516783 1949544 2187137\n",
      "1490 5.640177343107142 42 3\n",
      "1491 0.15427039071823054 8800 13034\n",
      "1492 0.11321356601688148 9 13\n",
      "1493 0.00024702583779352954 909020 923420\n",
      "1494 1.345136752578244 1008 3217\n",
      "1495 0.13418173628604202 148771 103141\n",
      "1496 0.20939618953453126 228348 360853\n",
      "1497 0.07769342261928869 288740 381558\n",
      "1498 0.21011460826382727 2681263 1695377\n",
      "1499 3.5990642370534065 19 2\n",
      "1500 5.556910401604504 675 63\n",
      "1501 3.369117970516054 79181 12631\n",
      "1502 0.11761110232544358 30 21\n",
      "1503 0.6098356776397965 33332 15265\n",
      "1504 0.32205229303723965 7727 13630\n",
      "1505 0.03324115007177121 53 44\n",
      "1506 0.001251603390427789 18278618 18936854\n",
      "1507 0.43581212548256815 655 338\n",
      "1508 0.7396741958103469 54973 129917\n",
      "1509 1.003486364064839 30328 11137\n",
      "1510 3.3581713968551226 8784129 54898064\n",
      "1511 0.08223924788917193 19538354 14667080\n",
      "1512 0.08382802140691285 9745 7295\n",
      "1513 3.6554957144618054 1359 200\n",
      "1514 2.5499590817900986 78 15\n",
      "1515 0.18695951853409778 2426887 1574946\n",
      "1516 0.014261951948301576 84463 95177\n",
      "1517 2.0366610925356112 35 149\n",
      "1518 0.12329012439710282 39620 27888\n",
      "1519 0.24796597658989203 312703 514511\n",
      "1520 0.13567937274345807 6499560 4496897\n",
      "1521 0.24585422585209477 52841 32183\n",
      "1522 0.18603470880508585 189157 122886\n",
      "1523 0.026667262013745616 8475166 9978586\n",
      "1524 5.097883006114282 305 31\n",
      "1525 0.0365853732999258 2160144 1784078\n",
      "1526 0.09145770116928213 1806987 2445087\n",
      "1527 0.01713053367089918 2056452 1804166\n",
      "1528 0.4430335029477517 379585 738549\n",
      "1529 1.2780026543645522 34518203 11145089\n",
      "1530 14.76579268177138 942287 20199\n",
      "1531 0.09363839512304586 4116896 3031615\n",
      "1532 0.7472621624532448 569 1352\n",
      "1533 0.0178306328162444 6 7\n",
      "1534 1.1954964379787856 10305316 3453099\n",
      "1535 2.262248815493294 8 1\n",
      "1536 5.0376790782643415 61 584\n",
      "1537 0.5196876310347567 194594778 94634840\n",
      "1538 0.7424175168995725 357756 846821\n",
      "1539 0.2118651142514911 14339 9049\n",
      "1540 1.4774261362278707 75905 255951\n",
      "1541 0.22553968205903333 116292 186983\n",
      "1542 0.022469657301231582 2882131 2480924\n",
      "1543 0.13810252617027488 470860 682787\n",
      "1544 0.8306963850958743 400127 995464\n",
      "1545 2.057029582461665 8353 35056\n",
      "1546 0.0701056005938979 151597 197552\n",
      "1547 0.00021484887065930782 4672 4741\n",
      "1548 0.13239623197755043 29519 20515\n",
      "1549 0.002503163926159624 3626 3449\n",
      "1550 0.8031955571954431 518594 1270714\n",
      "1551 0.30707317089096414 11921491 20748721\n",
      "1552 0.002430449850200625 31717004 33319824\n",
      "1553 0.045281648237957495 20299094 16408203\n",
      "1554 0.2609428178959135 4 2\n",
      "1555 4.08047321001288 391 51\n",
      "1556 3.8493117125955947 26089 185585\n",
      "1557 0.3702285528717853 214 116\n",
      "1558 41.50632379655608 17 11303\n",
      "1559 1.0752482644534633 100505 283484\n",
      "1560 0.00032429975789648527 24975536 24529794\n",
      "1561 0.2842836112220242 391 229\n",
      "1562 0.24228019527621406 7064772 11557568\n",
      "1563 0.18677629115838962 6834478 4436225\n",
      "1564 0.3955630531213227 465853 873763\n",
      "1565 0.024565158995985197 24376127 20839943\n",
      "1566 0.023675021588390258 678176 790981\n",
      "1567 1.2055668112494617 1067788 3201351\n",
      "1568 0.08403551436872125 5441432 7271271\n",
      "1569 1.0680664138193345 207 73\n",
      "1570 0.3346469999581827 4359275 7774061\n",
      "1571 1.2630323056564439 1942342 5975943\n",
      "1572 0.15183231552924734 1548229 2285920\n",
      "1573 4.23696170394275 5 46\n",
      "1574 0.008419975679329224 2127324 1940808\n",
      "1575 0.0956245094954567 543351 398825\n",
      "1576 7.733303773906425 28316 456844\n",
      "1577 6.790285745168101 125035 9232\n",
      "1578 0.04973961217445166 37765421 30215955\n",
      "1579 0.024724751871829525 1124465 960853\n",
      "1580 1.1318324168228653 1239 3592\n",
      "1581 0.0837825159838202 4319204 3233674\n",
      "1582 0.09667538613901293 3495627 2561482\n",
      "1583 0.002556877242967203 1991363 1893172\n",
      "1584 0.00012973942464501357 7366282 7450666\n",
      "1585 0.0036100253501599065 22515 21202\n",
      "1586 3.961765519567442 60772 444772\n",
      "1587 0.24997658923467395 18334 11120\n",
      "1588 0.02195972190643131 936254 1085803\n",
      "1589 0.05328653366665126 5054 4012\n",
      "1590 0.25458398680712635 27591155 16658691\n",
      "1591 3.214808358098928 33528645 5581242\n",
      "1592 15.911993723744294 323 5\n",
      "1593 0.16440195389316542 7 11\n",
      "1594 2.6390443548379663 400 78\n",
      "1595 0.005975696390956241 240447 222560\n",
      "1596 0.35246352614391235 168 305\n",
      "1597 0.18979145813120518 6890469 4457056\n",
      "1598 0.0019293276177525243 27764325 26571197\n",
      "1599 0.6334411585910805 68225 151216\n",
      "1600 2.845214334692827 16432 3041\n",
      "1601 0.6393912249500162 898 1999\n",
      "1602 0.16837143042824876 1484532 2237660\n",
      "1603 0.023762432091205918 5 6\n",
      "1604 0.26530384246693123 693007 414040\n",
      "1605 0.0022792237183638774 12619647 12031325\n",
      "1606 8.400759456107254 6622 364\n",
      "1607 0.13587182410930362 2705561 3911486\n",
      "1608 0.07276638033890417 1220648 1598610\n",
      "1609 0.015647982704563038 989962 873558\n",
      "1610 0.06162754817090765 2417224 1885835\n",
      "1611 0.0470512502473189 199 160\n",
      "1612 0.07310505307699251 64610 49303\n",
      "1613 0.21204563651299704 1531068 966070\n",
      "1614 0.008332610120715782 24232400 22118347\n",
      "1615 0.2943684718120932 820164 476728\n",
      "1616 0.2908664593501467 114703 66888\n",
      "1617 0.0039360727298830295 15235209 16221658\n",
      "1618 0.08418336062212722 4510308 3374414\n",
      "1619 0.025357297768187514 196 167\n",
      "1620 0.5091327831747093 6353540 12968815\n",
      "1621 22.14675162457794 18329 2027527\n",
      "1622 0.0691112760572995 5762475 4430342\n",
      "1623 0.07392613609499166 5783318 4406514\n",
      "1624 0.2603796356676247 677556 1128638\n",
      "1625 0.23838275217773158 113005 184137\n",
      "1626 0.24075343131689242 4631067 7564405\n",
      "1627 0.15082611287684375 55742 37802\n",
      "1628 1.0708451553114378 925 328\n",
      "1629 1.4495505135564588 56 189\n",
      "1630 0.22404051478016818 5192483 3234527\n",
      "1631 0.3141197893050664 143908 82163\n",
      "1632 0.27674183941970965 214201746 126577621\n",
      "1633 0.17814881309020633 606 397\n",
      "1634 4.304261906022952 42264 336510\n",
      "1635 0.01820983787067627 27232420 23794735\n",
      "1636 1.206948960812582 11 3\n",
      "1637 0.31316983010658406 6 3\n",
      "1638 2.05500725874238 1028478 245256\n",
      "1639 0.022175402088821856 1478856 1274246\n",
      "1640 2.971022535123948 46983 263346\n",
      "1641 6.03555065745989 34 2\n",
      "1642 0.004090555951752036 1943326 2071677\n",
      "1643 0.03571139804824105 6112450 5059932\n",
      "1644 0.07085950112945516 338748 259579\n",
      "1645 10.356735903582576 16935 423108\n",
      "1646 2.525668537181941 48 9\n",
      "1647 7.702303567480907 1475 91\n",
      "1648 11.893469263201448 97 3082\n",
      "1649 3.681004235372576 333 2274\n",
      "1650 1.3690667362785358 17 57\n",
      "1651 0.5762432373894825 264289 123709\n",
      "1652 0.34331115812870905 1126405 2023764\n",
      "1653 0.8111663043150037 185111856 75212132\n",
      "1654 0.2915746590084724 136650 79634\n",
      "1655 2.0988144875650714 12048 51300\n",
      "1656 0.06999262084916008 5768714 4427734\n",
      "1657 0.1868462045835624 11987961 18470226\n",
      "1658 0.041475580822248545 17416870 14207695\n",
      "1659 3.0338273090966275 285359 49996\n",
      "1660 0.16440195389316548 2 1\n",
      "1661 9.973399970798056 65540 2785\n",
      "1662 0.004186295088343419 1996109 1871047\n",
      "1663 0.1278419388988387 849602 1214780\n",
      "1664 0.0002694576969220687 203547 200233\n",
      "1665 0.25057737794667123 1562516 2577641\n",
      "1666 0.09043512242912795 3378203 2500822\n",
      "1667 0.0385082505794985 2665428 2190499\n",
      "1668 3.893971011822377 24784 3444\n",
      "1669 3.9303046500735177 166 22\n",
      "1670 24.134178026346305 543 3\n",
      "1671 0.010163722760360204 448806 496412\n",
      "1672 0.6790307909179457 4738521 10802293\n",
      "1673 0.7255425252530469 28943 12348\n",
      "1674 0.06967013672242849 3719734 2856797\n",
      "1675 0.3943186108753665 3237523 6066347\n",
      "1676 0.25742480440253257 4092991 2464294\n",
      "1677 0.04975869837231966 35183072 28148624\n",
      "1678 0.26564306121244724 64642600 38608324\n",
      "1679 0.0 2 2\n",
      "1680 6.51065247754365 371 28\n",
      "1681 0.06822083153637226 90740 117824\n",
      "1682 0.09093543687242424 5574147 4123013\n",
      "1683 2.024589726310514 47236 11384\n",
      "1684 0.017404434389586144 14333055 16354356\n",
      "1685 0.05755262490144128 31387792 24692949\n",
      "1686 0.05308824529598605 5199070 4129147\n",
      "1687 0.026824502674302045 1016939 863307\n",
      "1688 2.94540309831498 616777 3431410\n",
      "1689 0.006110917874833848 1783070 1648992\n",
      "1690 0.5685197546954468 68928962 146507839\n",
      "1691 4.679642443580609 550776 4791400\n",
      "1692 0.2159280468268116 6192680 3891082\n",
      "1693 0.012990652589008432 901579 1010423\n",
      "1694 0.4165781022203178 240222 125981\n",
      "1695 2.2737669132390548 86 392\n",
      "1696 1.6529957426257378 3920 1083\n",
      "1697 6.8529173063677 3950 54150\n",
      "1698 0.5617334690071152 441105846 208470189\n",
      "1699 23.979078433179946 33203733 248035\n",
      "1700 0.03841001641940688 7435523 6112184\n",
      "1701 0.004550193904599817 201185375 188061986\n",
      "1702 0.14398586085733858 15832132 23138462\n",
      "1703 0.03172820718531617 2516163 2105622\n",
      "1704 1.519026607617314 120 414\n",
      "1705 0.0020848663687456203 17681951 16892742\n",
      "1706 1.26361460835242 873 283\n",
      "1707 1.3695922973697627 12212218 3789147\n",
      "1708 1.0220463011414647 1507946 4144209\n",
      "1709 5.749901739308773 4 54\n",
      "1710 0.7064708039024318 456148562 196822119\n",
      "1711 56.93692316920358 518545 273\n",
      "1712 0.29378081926175026 46853 80563\n",
      "1713 0.8279939912799543 2353678 947467\n",
      "1714 0.11016185296378621 52487 37662\n",
      "1715 1.6656155199742497 818 2976\n",
      "1716 0.8071116278355651 984652 2417968\n",
      "1717 0.08263336756704807 281592191 211241006\n",
      "1718 0.0352250962313631 24072986 29042893\n",
      "1719 1.4310158765706962 42 12\n",
      "1720 0.751615106779538 25780 61349\n",
      "1721 1.6712498855234967 461 1682\n",
      "1722 6.058532682697002 182 2144\n",
      "1723 0.3844207052090665 221520 119163\n",
      "1724 0.04195447345241525 328360 403001\n",
      "1725 0.2823510192311664 6189827 3638385\n",
      "1726 1.0691237331194332 21130867 7513868\n",
      "1727 0.13962809441711033 184663 268326\n",
      "1728 0.30637885230294826 15561272 8946570\n",
      "1729 0.007673018889131256 9420274 10282670\n",
      "1730 0.16591050275865013 245099 368332\n",
      "1731 0.020429971212133406 14045938 16204139\n",
      "1732 0.06423199463237228 4980686 3865640\n",
      "1733 11.829122559279348 186 5\n",
      "1734 0.33976485233528075 14757869 8239032\n",
      "1735 0.062087717691747306 1595487 1243593\n",
      "1736 1.6913592364230925 53540 14583\n",
      "1737 0.2142971624270152 99624740 62707942\n",
      "1738 37.07465964770066 195333 442\n",
      "1739 0.004188126227959742 30891012 28955192\n",
      "1740 0.03324115007177121 5 4\n",
      "1741 0.09833370635244713 362713 496307\n",
      "1742 6.66987723215794 233974 17682\n",
      "1743 3.0007548241061257 100 570\n",
      "1744 4.561956460214846 4231939 499966\n",
      "1745 0.4516546161963164 327411 167195\n",
      "1746 0.07148016941134619 79922 61172\n",
      "1747 0.04708862425843868 2208301 1777529\n",
      "1748 3.305801003716835 1141116 185225\n",
      "1749 0.22656497264864278 91145 146708\n",
      "1750 4.407563078741075 101870 12481\n",
      "1751 1.7901162383101388 334339 1274246\n",
      "1752 4.044280657727502e-05 20448826 20579284\n",
      "1753 0.3943186432172129 57533 107804\n",
      "1754 0.01283112820696042 834879 935014\n",
      "1755 0.012030859627071804 73302 65687\n",
      "1756 0.10083245872099642 5799523 4221685\n",
      "1757 4.452999750916995 3 32\n",
      "1758 8.124588746976077 293 16\n",
      "1759 1.1644871145482303 7030012 2389476\n",
      "1760 1.9218120556728056 7 1\n",
      "1761 0.340225777222799 3938122 2197708\n",
      "1762 0.022814438465428764 1606976 1381693\n",
      "1763 0.8830338775194019 151 388\n",
      "1764 0.032244789608711855 788535 658924\n",
      "1765 0.9184527193792468 36461814 95071090\n",
      "1766 0.27902747323851784 967190 1640286\n",
      "1767 0.5215087217559762 52070910 25291056\n",
      "1768 1.3633018100207261 44 13\n",
      "1769 0.5843515175757357 1331458 619926\n",
      "1770 0.22674462842823984 15240 9466\n",
      "1771 18.72672348998003 302 3\n",
      "1772 0.36425452262393304 63 34\n",
      "1773 0.10100037751034185 29322 40292\n",
      "1774 0.03982100802500947 2573525 2107968\n",
      "1775 0.1049304434813629 15528633 11231877\n",
      "1776 0.3652034545087809 68251 37295\n",
      "1777 0.27635004482284614 5832701 3447985\n",
      "1778 0.2260276913982274 36 22\n",
      "1779 0.0036311701624267266 170 160\n",
      "1780 4.3172128396027486e-05 27604358 27786331\n",
      "1781 0.16292715628118215 80299 53630\n",
      "1782 0.084027885865964 5867966 4391331\n",
      "1783 0.4469004515924158 2364 1211\n",
      "1784 0.07506614280242911 4142617 3149821\n",
      "1785 0.2192892433648441 2552072 1597791\n",
      "1786 0.5139168225091114 20235 41443\n",
      "1787 0.626153393277811 12383603 27321443\n",
      "1788 0.07749465203292848 5345317 4046459\n",
      "1789 0.754881549871142 2333721 5563923\n",
      "1790 1.3157821703977093 180572 568618\n",
      "1791 11.897815774750772 7170 225735\n",
      "1792 0.5421097034903987 8130012 3893394\n",
      "1793 0.104100322169146 609863 441681\n",
      "1794 0.02045671209740521 11110196 12818510\n",
      "1795 47.236499672925504 10622 10\n",
      "1796 0.030082739245015817 5145479 6120001\n",
      "1797 0.2664794463779952 123 73\n",
      "1798 2.909841055444794 19682431 3574769\n",
      "1799 0.12971685639602873 85911306 59928559\n",
      "1800 0.12987568120338433 12585082 8776954\n",
      "1801 0.1335130529490113 88670234 127781099\n",
      "1802 0.18697850953804615 1505647 977079\n",
      "1803 6.028528437603694 39 465\n",
      "1804 1.1943055223032688 1208412 405134\n",
      "1805 0.08858872865724345 1360899 1832689\n",
      "1806 7.249804040556568 12 191\n",
      "1807 0.8342125203023544 30196 75270\n",
      "1808 0.6904531821942073 74221610 32334029\n",
      "1809 0.04238458587131414 208245 255850\n",
      "1810 3.89978308488485e-05 5306675 5273639\n",
      "1811 17.238973973460535 1588 24\n",
      "1812 0.5978226032665038 5 12\n",
      "1813 0.01729978295138862 1668599 1462951\n",
      "1814 0.11341711568715246 878612 627390\n",
      "1815 0.08956643527487206 14331297 19331237\n",
      "1816 0.004750939951446143 16020960 14953880\n",
      "1817 1.185994842376042 28780 9685\n",
      "1818 9.425696287474318 598815 12901308\n",
      "1819 0.0862052850335603 19590404 14606001\n",
      "1820 1.3653299158319734 132993 427851\n",
      "1821 0.11363224581729922 31541 22515\n",
      "1822 2.579438042968423 2340038 469589\n",
      "1823 0.1569422436990834 1029770 1530348\n",
      "1824 0.2600778413422354 10861 18087\n",
      "1825 0.3445729422923166 82678280 148704378\n",
      "1826 0.8429402773483722 2524938 1008131\n",
      "1827 0.004150418228222727 454750 426377\n",
      "1828 9.602592002394555 4110739 185406\n",
      "1829 1.282542942793791 78704 25360\n",
      "1830 0.5686982336965672 636512 299430\n",
      "1831 0.1582286171110936 930122 624865\n",
      "1832 0.40815434435145576 366472 193456\n",
      "1833 0.36238844559923866 21505374 11778970\n",
      "1834 0.08358176439599231 4326458 3240229\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "1835 0.47164756004239483 575483 289583\n",
      "1836 0.1256623855201602 26499913 18590534\n",
      "1837 0.2226301885850928 2336264 1457490\n",
      "1838 0.27200987134054705 977074 579993\n",
      "1839 0.5947090965794948 146508 67755\n",
      "1840 0.038067966893336906 2284255 1879357\n",
      "1841 11.966346575538099 7979 250\n",
      "1842 0.00925744162743184 110495 100359\n",
      "1843 0.0016121113144350323 22356311 21476462\n",
      "1844 0.05699082724737961 748827 589798\n",
      "1845 3.29697515756189 803252 130700\n",
      "1846 0.07196556650367034 152 116\n",
      "1847 0.24993448128294501 604102 366430\n",
      "1848 0.29051722780208133 35 20\n",
      "1849 2.909231344869578 1452080 7993626\n",
      "1850 0.00016649785635932987 77 76\n",
      "1851 1.1620121687512597 36148 106230\n",
      "1852 8.356587493218615 210219 3785490\n",
      "1853 0.001858490545566671 4626862 4830689\n",
      "1854 0.5133966463708661 1881 3852\n",
      "1855 0.18335430007674897 50645452 33004718\n",
      "1856 0.04684470660648148 43436870 34983350\n",
      "1857 0.22215850864376108 171263 274387\n",
      "1858 0.1016347031951023 358 260\n",
      "1859 5.50254607379099 13184 137665\n",
      "1860 0.007678066643753483 184829865 169323515\n",
      "1861 0.025048055400877442 80032 68317\n",
      "1862 3.626120276093034 46 6\n",
      "1863 2.547954198870224 66225663 13421216\n",
      "1864 0.6879686000778579 73806 169167\n",
      "1865 0.02544887549049274 372 317\n",
      "1866 0.8232329865750018 1705455 4225570\n",
      "1867 0.3105298700087543 3308749 5776641\n",
      "1868 0.06108335852899588 7557168 5902323\n",
      "1869 0.0027784095465478334 4210124 3993953\n",
      "1870 0.031111878148778083 357093 299349\n",
      "1871 0.4331239440314795 15499 29932\n",
      "1872 1.6128913430378184 2515220 706355\n",
      "1873 0.021858946450679102 170489916 197655154\n",
      "1874 0.5975732457909282 46939151 101685095\n",
      "1875 0.07089863608049769 176 230\n",
      "1876 1.6804824016179718 902 246\n",
      "1877 0.03286381926565957 10283 12327\n",
      "1878 3.210401995568401 17 2\n",
      "1879 0.6179702578609866 28859 13148\n",
      "1880 0.07646198558591251 4106846 3114715\n",
      "1881 1.594983714934492 3149214 890676\n",
      "1882 0.09846977999501025 132194 180923\n",
      "1883 0.0326086825534797 1000840 835488\n",
      "1884 13.26954967858989 6040 230743\n",
      "1885 4.663554363876724 8207 71138\n",
      "1886 0.12344803296317809 261551 371661\n",
      "1887 2.7837346904827553 20479 108624\n",
      "1888 0.04767557793831735 259 208\n",
      "1889 0.4989679619714549 2142616 4342301\n",
      "1890 0.01624601197531569 14393132 12670688\n",
      "1891 0.06147034170462867 5708595 4455061\n",
      "1892 3.3253443436321124 219632 35459\n",
      "1893 0.8705448679362159 58411 148494\n",
      "1894 4.764952559997289 345 38\n",
      "1895 0.40530182401885134 13506946 7146129\n",
      "1896 15.5972229078626 341765 6584\n",
      "1897 0.2377974270937913 38680546 62990352\n",
      "1898 0.17164095486963848 2954189 1952144\n",
      "1899 0.03439802624453913 171615 206587\n",
      "1900 13.247427461063593 8338670 218974\n",
      "1901 0.006613174915707272 4481945 4131893\n",
      "1902 0.502632746916699 5569399 2740995\n",
      "1903 0.9666993162990732 140674 376026\n",
      "1904 0.637614540919013 19 8\n",
      "1905 0.31316983010658406 6 3\n",
      "1906 0.014967840732603278 3117598 3523328\n",
      "1907 0.017921591907646254 2525730 2209262\n",
      "1908 0.8935510053660958 524 203\n",
      "1909 0.5681719300577368 50 23\n",
      "1910 9.117079530071708 55679 1140327\n",
      "1911 0.6576078155726617 26 11\n",
      "1912 0.1364555082655285 38774414 26798924\n",
      "1913 1.285201666643951 1594791 513284\n",
      "1914 0.10383276143009533 92670 67142\n",
      "1915 0.004890013061137318 1521406 1631604\n",
      "1916 0.021492578645127873 43 37\n",
      "1917 0.1516919307196573 2654070 1797897\n",
      "1918 0.08551306890139605 4280912 3195488\n",
      "1919 1.4427659541003885 235 70\n",
      "1920 6.2124811360138725 444656 36774\n",
      "1921 0.7659873227266218 85343 204771\n",
      "1922 0.22226958981483677 571003 356359\n",
      "1923 4.032232393290014 77 580\n",
      "1924 23.871923141244256 67130 506\n",
      "1925 0.09162019177028942 1542795 1139862\n",
      "1926 0.16171613345387625 208926 312349\n",
      "1927 0.0023561927810569288 12235484 11655750\n",
      "1928 0.14026376567287208 10597404 7286993\n",
      "1929 0.290697627756234 34859 59769\n",
      "1930 7.705700729869414 4833773 77598020\n",
      "1931 0.5487168245242127 1337789 637796\n",
      "1932 10.808677636359997 1338 49\n",
      "1933 0.31053245003599894 293997 513282\n",
      "1934 0.18962710132471683 1165917 754308\n",
      "1935 0.21972091791205062 703402 440180\n",
      "1936 1.4168252094569813 4435774 1349045\n",
      "1937 38.3803688843447 1098950 2240\n",
      "1938 0.4189163545534895 77 148\n",
      "1939 0.8452281987334944 129 325\n",
      "1940 2.913642791877596 11187 61668\n",
      "1941 1.857544180538998 148815 581509\n",
      "1942 12.51909051155135 14886 512202\n",
      "1943 0.974770567612381 14769 5502\n",
      "1944 0.42201104589504634 2030 3888\n",
      "1945 0.15982307771046356 2527841 3770262\n",
      "1946 0.283456287597123 7187412 4220379\n",
      "1947 3.9713235076860394 1000160 7337358\n",
      "1948 4.12201789293528 1396449 183352\n",
      "1949 0.01981033524836375 691390 795884\n",
      "1950 0.07538429910104963 3786048 4982266\n",
      "1951 1.4732243233764348 2941 873\n",
      "1952 2.079447906165934 152849 36140\n",
      "1953 2.017298814088459 178082 736983\n",
      "1954 0.7862907987695983 494122582 203578815\n",
      "1955 0.242300096426041 11140 6809\n",
      "1956 6.876463167046166 163693 11889\n",
      "1957 1.0293721498011668 10512 28996\n",
      "1958 0.0023389676267810775 447258 426142\n",
      "1959 3.0805690886843298 1971720 340871\n",
      "1960 2.2065347262453066 1016 4491\n",
      "1961 0.004653761053234241 14148403 13215406\n",
      "1962 39.4432034153577 969196 1814\n",
      "1963 0.0074461689554365116 3169864 2907802\n",
      "1964 0.01359612701708186 3907448 3477390\n",
      "1965 5.824583895848837 2153434 24058339\n",
      "1966 0.78585842847938 51926902 21399133\n",
      "1967 0.07750886293490344 21706 16431\n",
      "1968 0.5603276822694793 41927 88632\n",
      "1969 49.74622138971648 2633209 2276\n",
      "1970 0.7200008355450157 115 270\n",
      "1971 18.31089775498783 9332432 129298\n",
      "1972 0.05276401776325474 2844511 2260728\n",
      "1973 0.12220531806379291 608446 428945\n",
      "1974 0.2359505864961791 53948788 33191299\n",
      "1975 2.0666582672673814 246 1039\n",
      "1976 3.2952776629774956 85 13\n",
      "1977 8.806420642515134 8 174\n",
      "1978 6.056096983141194 1978 23184\n",
      "1979 2.3569147062489284 5262402 1133559\n",
      "1980 0.03361039004122937 29764166 35753085\n",
      "1981 16.27788943930456 357 20233\n",
      "1982 0.43351317084035534 162770 314429\n",
      "1983 0.3194747464684607 555246 977143\n",
      "1984 4.7654630901109 345 3069\n",
      "1985 2.653318759986479e-05 11492881 11552234\n",
      "1986 0.3604569104442446 3199 5832\n",
      "1987 0.06968717042161489 3745249 2876300\n",
      "1988 1.770362743212858 454030 1717654\n",
      "1989 0.0006747258938859834 151 155\n",
      "1990 0.8722821548600791 37987971 96662936\n",
      "1991 1.5950577950624063 49971827 14132871\n",
      "1992 1.1198012946455282 27381 78892\n",
      "1993 2.0494589180304135 14924 3565\n",
      "1994 0.10149131087647693 347811 252922\n",
      "1995 3.5036433894535364 3990 613\n",
      "1996 0.07063324158177858 4592749 3520875\n",
      "1997 0.7920410610025553 815 1986\n",
      "1998 2.767692623912904 833 157\n",
      "1999 0.02738097220797263 840095 711975\n",
      "2000 1.980895446264765 874777 214117\n",
      "2.3926106037676362\n"
     ]
    }
   ],
   "source": [
    "\n",
    "test_label = {}\n",
    "with open('testlabel.csv', 'r') as csvfile:\n",
    "    Data = csv.reader(csvfile, delimiter = '#')\n",
    "    for query in Data : test_label[query[0] + query[1] + query[2]] = int(query[3])\n",
    "\n",
    "Y_real = []\n",
    "with open('test_without_label.csv', 'r') as csvfile:\n",
    "    Data = csv.reader(csvfile, delimiter = '#')\n",
    "    for query in Data : Y_real.append(test_label[query[0] + query[1] + query[2]])\n",
    "        \n",
    "qerror = 0\n",
    "for i in range(len(Y_real)) :\n",
    "    y_hat = int(math.exp(Y_test[i]))\n",
    "    y = Y_real[i]\n",
    "    qerror += (math.log((y+1)/(y_hat+1)))**2\n",
    "    print(str(i+1) + \" \" + str((math.log((y+1)/(y_hat+1)))**2) + \" \" + str(y_hat) + \" \" + str(y))\n",
    "print(qerror / len(Y_real))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.5"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
