{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [],
   "source": [
    "import csv\n",
    "import math\n",
    "# from sklearn import ensemble\n",
    "# from sklearn import datasets\n",
    "# from sklearn.utils import shuffle\n",
    "# from sklearn.metrics import mean_squared_error\n",
    "import numpy as np\n",
    "import pandas as pd\n",
    "import matplotlib.pyplot as plt\n",
    "from sklearn.model_selection import train_test_split\n",
    "from sklearn.neural_network import MLPRegressor\n",
    "from sklearn.neural_network import MLPClassifier\n",
    "from sklearn.metrics import accuracy_score\n",
    "from sklearn.tree import DecisionTreeClassifier\n",
    "from sklearn.ensemble import BaggingClassifier\n",
    "from sklearn.ensemble import RandomForestClassifier    \n",
    "from sklearn.ensemble import AdaBoostClassifier\n",
    "from sklearn.ensemble import GradientBoostingClassifier\n",
    "from sklearn.ensemble import GradientBoostingRegressor\n",
    "from sklearn.model_selection import KFold\n",
    "# Import GridSearchCV for hyperparameter search\n",
    "from sklearn.model_selection import GridSearchCV\n",
    "# A random forest was also considered as the model\n",
    "\n",
    "'''\n",
    "movie_id\n",
    "keyword_id\n",
    "id\n",
    "info_type_id\n",
    "role_id\n",
    "person_id\n",
    "company_type_id\n",
    "company_id\n",
    "production_year\n",
    "kind_id\n",
    "'''\n",
    "Cnt = 0\n",
    "\n",
    "# Lookup tables mapping table aliases, attributes, join predicates and\n",
    "# comparison operators to fixed one-hot positions in the feature vector.\n",
    "Dict_table = {'t': 0, 'mc': 1, 'ci': 2, 'mi': 3, 'mi_idx': 4, 'mk': 5}\n",
    "Num_table = {}\n",
    "\n",
    "Dict_attr = {\n",
    "    't.production_year': 0,\n",
    "    'mi.info_type_id': 1,\n",
    "    't.kind_id': 2,\n",
    "    'mi_idx.info_type_id': 3,\n",
    "    'mk.keyword_id': 4,\n",
    "    'ci.person_id': 5,\n",
    "    'mc.company_id': 6,\n",
    "    'mc.company_type_id': 7,\n",
    "    'ci.role_id': 8,\n",
    "}\n",
    "\n",
    "# The empty string maps to slot 0 so that a query with no joins still\n",
    "# sets a defined position.\n",
    "Dict_join = {\n",
    "    '': 0,\n",
    "    't.id=mc.movie_id': 1,\n",
    "    't.id=mi.movie_id': 2,\n",
    "    't.id=mi_idx.movie_id': 3,\n",
    "    't.id=ci.movie_id': 4,\n",
    "    't.id=mk.movie_id': 5,\n",
    "}\n",
    "\n",
    "Dict_comp = {'>': 0, '=': 1, '<': 2}\n",
    "\n",
    "# Per-column min/max values (kept as strings, converted on use) and\n",
    "# per-table reciprocal cardinalities, loaded from the statistics CSV.\n",
    "Dict_max = {}\n",
    "Dict_min = {}\n",
    "\n",
    "with open('column_min_max_vals.csv', 'r') as csvfile:\n",
    "    Max_list = csv.reader(csvfile)\n",
    "    for Max_data in Max_list:\n",
    "        # Skip the header row.\n",
    "        if Max_data[0] == 'name':\n",
    "            continue\n",
    "        Dict_min[Max_data[0]] = Max_data[1]\n",
    "        Dict_max[Max_data[0]] = Max_data[2]\n",
    "        Num_table[Max_data[0].split('.')[0]] = 1 / int(Max_data[3])\n",
    "\n",
    "def Parse(Data, Is_train):\n",
    "    \"\"\"Encode raw query rows as fixed-width feature vectors.\n",
    "\n",
    "    Each row of Data has the '#'-separated fields\n",
    "    tables, joins, predicates(, cardinality); the fields themselves are\n",
    "    comma-separated.  Predicates arrive as a flattened sequence of\n",
    "    (attribute, comparison operator, literal value) triples.\n",
    "\n",
    "    Parameters\n",
    "    ----------\n",
    "    Data : iterable of sequences\n",
    "        Rows produced by csv.reader.\n",
    "    Is_train : int\n",
    "        1 when rows carry a true cardinality in column 3.\n",
    "\n",
    "    Returns\n",
    "    -------\n",
    "    list\n",
    "        [Feature_vec, Feature_target]; Feature_target holds the natural\n",
    "        log of each cardinality and stays empty when Is_train != 1.\n",
    "    \"\"\"\n",
    "    Feature_vec = []\n",
    "    Feature_target = []\n",
    "\n",
    "    for Sql in Data:\n",
    "        Table_name = Sql[0].split(',')\n",
    "        Join_table = Sql[1].split(',')\n",
    "\n",
    "        # Re-group the flattened predicate list into (attr, op, value)\n",
    "        # triples; an incomplete trailing group is ignored instead of\n",
    "        # raising IndexError as the old while-loop did.\n",
    "        Cur_cond = Sql[2].split(',')\n",
    "        Condition = [Cur_cond[i:i + 3]\n",
    "                     for i in range(0, len(Cur_cond) - 2, 3)]\n",
    "\n",
    "        # Layout: 3 table slots (6 wide) + 2 join slots (6 wide)\n",
    "        # + 6 predicate slots (9 attr + 3 op + 1 normalized value).\n",
    "        Cur_vec = [0.0] * (18 + 12 + 6 * (9 + 3 + 1))\n",
    "\n",
    "        # NOTE(review): more than 3 tables would spill past the 18-slot\n",
    "        # table region into the join region -- confirm queries join at\n",
    "        # most 3 base tables.\n",
    "        pos = 0\n",
    "        for Cur_table_name in Table_name:\n",
    "            # Entries look like 'title t'; the alias is the last token.\n",
    "            table_name = Cur_table_name.split(' ')[-1]\n",
    "            Cur_vec[pos + Dict_table[table_name]] = 1\n",
    "            pos += 6\n",
    "\n",
    "        pos = 18\n",
    "        for Cur_join in Join_table:\n",
    "            Cur_vec[pos + Dict_join[Cur_join]] = 1\n",
    "            pos += 6\n",
    "\n",
    "        pos = 30\n",
    "        for attr, op, value in Condition:\n",
    "            Cur_vec[pos + Dict_attr[attr]] = 1\n",
    "            Cur_vec[pos + 9 + Dict_comp[op]] = 1\n",
    "            # Min-max normalize the literal; a degenerate column\n",
    "            # (max == min) maps to 0.0 instead of dividing by zero.\n",
    "            lo = int(Dict_min[attr])\n",
    "            hi = int(Dict_max[attr])\n",
    "            span = 1.0 * (hi - lo)\n",
    "            Cur_vec[pos + 12] = (int(value) - lo) / span if span else 0.0\n",
    "            pos += 13\n",
    "\n",
    "        Feature_vec.append(Cur_vec)\n",
    "\n",
    "        if Is_train == 1:\n",
    "            # Train on log-cardinality to tame the huge dynamic range.\n",
    "            Feature_target.append(math.log(1.0 * int(Sql[3])))\n",
    "    return [Feature_vec, Feature_target]\n",
    "\n",
    "X_Data = []\n",
    "Y_Data = []\n",
    "\n",
    "# Training queries carry a label column; the test file does not.\n",
    "with open('train.csv', 'r') as csvfile:\n",
    "    Data = csv.reader(csvfile, delimiter='#')\n",
    "    X_Data = Parse(Data, 1)\n",
    "\n",
    "with open('test_without_label.csv', 'r') as csvfile:\n",
    "    Data = csv.reader(csvfile, delimiter='#')\n",
    "    Y_Data = Parse(Data, 0)\n",
    "\n",
    "# Unpack features and log-cardinality targets for the model below.\n",
    "X_train, Y_train = X_Data\n",
    "\n",
    "X_test = Y_Data[0]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iteration 1, loss = 17.34413991\n",
      "Iteration 2, loss = 4.81108325\n",
      "Iteration 3, loss = 3.99025170\n",
      "Iteration 4, loss = 3.48516282\n",
      "Iteration 5, loss = 3.17207584\n",
      "Iteration 6, loss = 2.96830974\n",
      "Iteration 7, loss = 2.83075395\n",
      "Iteration 8, loss = 2.69034930\n",
      "Iteration 9, loss = 2.57469532\n",
      "Iteration 10, loss = 2.47597032\n",
      "Iteration 11, loss = 2.36648903\n",
      "Iteration 12, loss = 2.26390491\n",
      "Iteration 13, loss = 2.16911235\n",
      "Iteration 14, loss = 2.06642445\n",
      "Iteration 15, loss = 1.95991784\n",
      "Iteration 16, loss = 1.89489116\n",
      "Iteration 17, loss = 1.79394036\n",
      "Iteration 18, loss = 1.70987097\n",
      "Iteration 19, loss = 1.65460549\n",
      "Iteration 20, loss = 1.59381989\n",
      "Iteration 21, loss = 1.53437986\n",
      "Iteration 22, loss = 1.48615855\n",
      "Iteration 23, loss = 1.45080592\n",
      "Iteration 24, loss = 1.42201546\n",
      "Iteration 25, loss = 1.36687220\n",
      "Iteration 26, loss = 1.34059555\n",
      "Iteration 27, loss = 1.30800157\n",
      "Iteration 28, loss = 1.28430491\n",
      "Iteration 29, loss = 1.25357984\n",
      "Iteration 30, loss = 1.22279296\n",
      "Iteration 31, loss = 1.21004715\n",
      "Iteration 32, loss = 1.18721895\n",
      "Iteration 33, loss = 1.16427648\n",
      "Iteration 34, loss = 1.14510960\n",
      "Iteration 35, loss = 1.12852032\n",
      "Iteration 36, loss = 1.11484613\n",
      "Iteration 37, loss = 1.10396849\n",
      "Iteration 38, loss = 1.08371476\n",
      "Iteration 39, loss = 1.07284371\n",
      "Iteration 40, loss = 1.05061446\n",
      "Iteration 41, loss = 1.04336428\n",
      "Iteration 42, loss = 1.02692910\n",
      "Iteration 43, loss = 1.00674169\n",
      "Iteration 44, loss = 0.99887882\n",
      "Iteration 45, loss = 0.99145348\n",
      "Iteration 46, loss = 0.98688572\n",
      "Iteration 47, loss = 0.96711701\n",
      "Iteration 48, loss = 0.97638573\n",
      "Iteration 49, loss = 0.94998269\n",
      "Iteration 50, loss = 0.94430569\n",
      "Iteration 51, loss = 0.93614882\n",
      "Iteration 52, loss = 0.92356995\n",
      "Iteration 53, loss = 0.91284192\n",
      "Iteration 54, loss = 0.90740228\n",
      "Iteration 55, loss = 0.90320911\n",
      "Iteration 56, loss = 0.88305766\n",
      "Iteration 57, loss = 0.88375873\n",
      "Iteration 58, loss = 0.88537813\n",
      "Iteration 59, loss = 0.86959323\n",
      "Iteration 60, loss = 0.86095113\n",
      "Iteration 61, loss = 0.85862455\n",
      "Iteration 62, loss = 0.84713430\n",
      "Iteration 63, loss = 0.85146948\n",
      "Iteration 64, loss = 0.83173170\n",
      "Iteration 65, loss = 0.83573735\n",
      "Iteration 66, loss = 0.83090860\n",
      "Iteration 67, loss = 0.82310612\n",
      "Iteration 68, loss = 0.81577174\n",
      "Iteration 69, loss = 0.80258645\n",
      "Iteration 70, loss = 0.81097131\n",
      "Iteration 71, loss = 0.79804558\n",
      "Iteration 72, loss = 0.79097033\n",
      "Iteration 73, loss = 0.78689312\n",
      "Iteration 74, loss = 0.77903887\n",
      "Iteration 75, loss = 0.77587088\n",
      "Iteration 76, loss = 0.77216505\n",
      "Iteration 77, loss = 0.76687693\n",
      "Iteration 78, loss = 0.77140280\n",
      "Iteration 79, loss = 0.75774126\n",
      "Iteration 80, loss = 0.75667286\n",
      "Iteration 81, loss = 0.74601706\n",
      "Iteration 82, loss = 0.74759342\n",
      "Iteration 83, loss = 0.75103707\n",
      "Iteration 84, loss = 0.73695681\n",
      "Iteration 85, loss = 0.74512039\n",
      "Iteration 86, loss = 0.77492163\n",
      "Iteration 87, loss = 0.75971883\n",
      "Iteration 88, loss = 0.73094684\n",
      "Iteration 89, loss = 0.72312321\n",
      "Iteration 90, loss = 0.72146899\n",
      "Iteration 91, loss = 0.71864767\n",
      "Iteration 92, loss = 0.71910457\n",
      "Iteration 93, loss = 0.71232705\n",
      "Iteration 94, loss = 0.71313591\n",
      "Iteration 95, loss = 0.71072025\n",
      "Iteration 96, loss = 0.70091968\n",
      "Iteration 97, loss = 0.70598407\n",
      "Iteration 98, loss = 0.69723160\n",
      "Iteration 99, loss = 0.69691671\n",
      "Iteration 100, loss = 0.69748365\n",
      "Iteration 101, loss = 0.69042186\n",
      "Iteration 102, loss = 0.68650210\n",
      "Iteration 103, loss = 0.68762876\n",
      "Iteration 104, loss = 0.68004727\n",
      "Iteration 105, loss = 0.67573547\n",
      "Iteration 106, loss = 0.67391348\n",
      "Iteration 107, loss = 0.67344186\n",
      "Iteration 108, loss = 0.66960277\n",
      "Iteration 109, loss = 0.67680123\n",
      "Iteration 110, loss = 0.67033300\n",
      "Iteration 111, loss = 0.66396430\n",
      "Iteration 112, loss = 0.65720571\n",
      "Iteration 113, loss = 0.65787592\n",
      "Iteration 114, loss = 0.65255391\n",
      "Iteration 115, loss = 0.65757994\n",
      "Iteration 116, loss = 0.65681572\n",
      "Iteration 117, loss = 0.65038427\n",
      "Iteration 118, loss = 0.65249478\n",
      "Iteration 119, loss = 0.64899927\n",
      "Iteration 120, loss = 0.64416277\n",
      "Iteration 121, loss = 0.64253977\n",
      "Iteration 122, loss = 0.63707434\n",
      "Iteration 123, loss = 0.63900336\n",
      "Iteration 124, loss = 0.63526438\n",
      "Iteration 125, loss = 0.63390515\n",
      "Iteration 126, loss = 0.63129283\n",
      "Iteration 127, loss = 0.62352613\n",
      "Iteration 128, loss = 0.63549270\n",
      "Iteration 129, loss = 0.62074490\n",
      "Iteration 130, loss = 0.61634341\n",
      "Iteration 131, loss = 0.61866200\n",
      "Iteration 132, loss = 0.61412050\n",
      "Iteration 133, loss = 0.62142232\n",
      "Iteration 134, loss = 0.61548007\n",
      "Iteration 135, loss = 0.61338055\n",
      "Iteration 136, loss = 0.60618123\n",
      "Iteration 137, loss = 0.60718902\n",
      "Iteration 138, loss = 0.61018638\n",
      "Iteration 139, loss = 0.60522038\n",
      "Iteration 140, loss = 0.60909182\n",
      "Iteration 141, loss = 0.60613476\n",
      "Iteration 142, loss = 0.59687350\n",
      "Iteration 143, loss = 0.59967783\n",
      "Iteration 144, loss = 0.59991693\n",
      "Iteration 145, loss = 0.59747356\n",
      "Iteration 146, loss = 0.59517519\n",
      "Iteration 147, loss = 0.59521741\n",
      "Iteration 148, loss = 0.59294221\n",
      "Iteration 149, loss = 0.58542972\n",
      "Iteration 150, loss = 0.58979055\n",
      "Iteration 151, loss = 0.58980968\n",
      "Iteration 152, loss = 0.58839203\n",
      "Iteration 153, loss = 0.58498491\n",
      "Iteration 154, loss = 0.58019971\n",
      "Iteration 155, loss = 0.57907745\n",
      "Iteration 156, loss = 0.57098177\n",
      "Iteration 157, loss = 0.58314347\n",
      "Iteration 158, loss = 0.57510861\n",
      "Iteration 159, loss = 0.57576413\n",
      "Iteration 160, loss = 0.57569717\n",
      "Iteration 161, loss = 0.57009618\n",
      "Iteration 162, loss = 0.56443573\n",
      "Iteration 163, loss = 0.57662055\n",
      "Iteration 164, loss = 0.56723100\n",
      "Iteration 165, loss = 0.56202631\n",
      "Iteration 166, loss = 0.57603808\n",
      "Iteration 167, loss = 0.56305611\n",
      "Iteration 168, loss = 0.56014542\n",
      "Iteration 169, loss = 0.56750074\n",
      "Iteration 170, loss = 0.55409765\n",
      "Iteration 171, loss = 0.55534712\n",
      "Iteration 172, loss = 0.55676178\n",
      "Iteration 173, loss = 0.55209725\n",
      "Iteration 174, loss = 0.55885869\n",
      "Iteration 175, loss = 0.55174659\n",
      "Iteration 176, loss = 0.54690572\n",
      "Iteration 177, loss = 0.55080012\n",
      "Iteration 178, loss = 0.55006937\n",
      "Iteration 179, loss = 0.55458560\n",
      "Iteration 180, loss = 0.54294641\n",
      "Iteration 181, loss = 0.54872461\n",
      "Iteration 182, loss = 0.53944153\n",
      "Iteration 183, loss = 0.55093664\n",
      "Iteration 184, loss = 0.54080684\n",
      "Iteration 185, loss = 0.54082180\n",
      "Iteration 186, loss = 0.53960613\n",
      "Iteration 187, loss = 0.53319583\n",
      "Iteration 188, loss = 0.53563141\n",
      "Iteration 189, loss = 0.53633152\n",
      "Iteration 190, loss = 0.53126962\n",
      "Iteration 191, loss = 0.53407003\n",
      "Iteration 192, loss = 0.53139334\n",
      "Iteration 193, loss = 0.53065123\n",
      "Iteration 194, loss = 0.52915472\n",
      "Iteration 195, loss = 0.52933713\n",
      "Iteration 196, loss = 0.52975114\n",
      "Iteration 197, loss = 0.53218800\n",
      "Iteration 198, loss = 0.52129188\n",
      "Iteration 199, loss = 0.52585177\n",
      "Iteration 200, loss = 0.52980576\n",
      "Iteration 201, loss = 0.52830244\n",
      "Iteration 202, loss = 0.51820773\n",
      "Iteration 203, loss = 0.52111932\n",
      "Iteration 204, loss = 0.51323070\n",
      "Iteration 205, loss = 0.52027113\n",
      "Iteration 206, loss = 0.52787837\n",
      "Iteration 207, loss = 0.51930594\n",
      "Iteration 208, loss = 0.51729471\n",
      "Iteration 209, loss = 0.51239077\n",
      "Iteration 210, loss = 0.51781758\n",
      "Iteration 211, loss = 0.51299807\n",
      "Iteration 212, loss = 0.51329329\n",
      "Iteration 213, loss = 0.50767958\n",
      "Iteration 214, loss = 0.51619783\n",
      "Iteration 215, loss = 0.51051242\n",
      "Iteration 216, loss = 0.51289029\n",
      "Iteration 217, loss = 0.50922332\n",
      "Iteration 218, loss = 0.51157013\n",
      "Iteration 219, loss = 0.50753289\n",
      "Iteration 220, loss = 0.50920527\n",
      "Iteration 221, loss = 0.49771203\n",
      "Iteration 222, loss = 0.50587781\n",
      "Iteration 223, loss = 0.51150105\n",
      "Iteration 224, loss = 0.50242557\n",
      "Iteration 225, loss = 0.50105109\n",
      "Iteration 226, loss = 0.49991690\n",
      "Iteration 227, loss = 0.49972565\n",
      "Iteration 228, loss = 0.49892700\n",
      "Iteration 229, loss = 0.49309404\n",
      "Iteration 230, loss = 0.50073010\n",
      "Iteration 231, loss = 0.49800334\n",
      "Iteration 232, loss = 0.49703062\n",
      "Iteration 233, loss = 0.49258637\n",
      "Iteration 234, loss = 0.49869993\n",
      "Iteration 235, loss = 0.49578888\n",
      "Iteration 236, loss = 0.48944705\n",
      "Iteration 237, loss = 0.49350713\n",
      "Iteration 238, loss = 0.48872070\n",
      "Iteration 239, loss = 0.49305097\n",
      "Iteration 240, loss = 0.49614254\n",
      "Iteration 241, loss = 0.48575544\n",
      "Iteration 242, loss = 0.49025479\n",
      "Iteration 243, loss = 0.48722546\n",
      "Iteration 244, loss = 0.48758859\n",
      "Iteration 245, loss = 0.48175515\n",
      "Iteration 246, loss = 0.48458627\n",
      "Iteration 247, loss = 0.48762973\n",
      "Iteration 248, loss = 0.48192536\n",
      "Iteration 249, loss = 0.48657261\n",
      "Iteration 250, loss = 0.48268639\n",
      "Iteration 251, loss = 0.48221086\n",
      "Iteration 252, loss = 0.47853969\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iteration 253, loss = 0.47389789\n",
      "Iteration 254, loss = 0.48754924\n",
      "Iteration 255, loss = 0.47968432\n",
      "Iteration 256, loss = 0.47989223\n",
      "Iteration 257, loss = 0.47702867\n",
      "Iteration 258, loss = 0.46878581\n",
      "Iteration 259, loss = 0.47646627\n",
      "Iteration 260, loss = 0.47751141\n",
      "Iteration 261, loss = 0.47606457\n",
      "Iteration 262, loss = 0.47683624\n",
      "Iteration 263, loss = 0.47239505\n",
      "Iteration 264, loss = 0.47729746\n",
      "Iteration 265, loss = 0.46966291\n",
      "Iteration 266, loss = 0.47300221\n",
      "Iteration 267, loss = 0.47188296\n",
      "Iteration 268, loss = 0.47042798\n",
      "Iteration 269, loss = 0.46558467\n",
      "Iteration 270, loss = 0.47140499\n",
      "Iteration 271, loss = 0.45810253\n",
      "Iteration 272, loss = 0.46499954\n",
      "Iteration 273, loss = 0.46489109\n",
      "Iteration 274, loss = 0.46747936\n",
      "Iteration 275, loss = 0.46654963\n",
      "Iteration 276, loss = 0.45669163\n",
      "Iteration 277, loss = 0.46667348\n",
      "Iteration 278, loss = 0.46854857\n",
      "Iteration 279, loss = 0.47372049\n",
      "Iteration 280, loss = 0.45593017\n",
      "Iteration 281, loss = 0.45650778\n",
      "Iteration 282, loss = 0.46646991\n",
      "Iteration 283, loss = 0.46108492\n",
      "Iteration 284, loss = 0.45805398\n",
      "Iteration 285, loss = 0.45542963\n",
      "Iteration 286, loss = 0.45738717\n",
      "Iteration 287, loss = 0.45225509\n",
      "Iteration 288, loss = 0.45395864\n",
      "Iteration 289, loss = 0.45022521\n",
      "Iteration 290, loss = 0.45956045\n",
      "Iteration 291, loss = 0.45299681\n",
      "Iteration 292, loss = 0.45552925\n",
      "Iteration 293, loss = 0.45865636\n",
      "Iteration 294, loss = 0.45574890\n",
      "Iteration 295, loss = 0.45264080\n",
      "Iteration 296, loss = 0.45902989\n",
      "Iteration 297, loss = 0.45439978\n",
      "Iteration 298, loss = 0.45124502\n",
      "Iteration 299, loss = 0.45433625\n",
      "Iteration 300, loss = 0.44597242\n",
      "Iteration 301, loss = 0.44776861\n",
      "Iteration 302, loss = 0.45108445\n",
      "Iteration 303, loss = 0.45230299\n",
      "Iteration 304, loss = 0.45215425\n",
      "Iteration 305, loss = 0.44514337\n",
      "Iteration 306, loss = 0.45283609\n",
      "Iteration 307, loss = 0.44917428\n",
      "Iteration 308, loss = 0.45274586\n",
      "Iteration 309, loss = 0.44696753\n",
      "Iteration 310, loss = 0.43945346\n",
      "Iteration 311, loss = 0.44691371\n",
      "Iteration 312, loss = 0.44577698\n",
      "Iteration 313, loss = 0.44173751\n",
      "Iteration 314, loss = 0.44312539\n",
      "Iteration 315, loss = 0.44270870\n",
      "Iteration 316, loss = 0.43833167\n",
      "Iteration 317, loss = 0.44378128\n",
      "Iteration 318, loss = 0.44259608\n",
      "Iteration 319, loss = 0.45013202\n",
      "Iteration 320, loss = 0.44326267\n",
      "Iteration 321, loss = 0.44175683\n",
      "Iteration 322, loss = 0.44688963\n",
      "Iteration 323, loss = 0.45102760\n",
      "Iteration 324, loss = 0.44393216\n",
      "Iteration 325, loss = 0.43640681\n",
      "Iteration 326, loss = 0.43908491\n",
      "Iteration 327, loss = 0.43422899\n",
      "Iteration 328, loss = 0.44035782\n",
      "Iteration 329, loss = 0.44380917\n",
      "Iteration 330, loss = 0.42914545\n",
      "Iteration 331, loss = 0.42891545\n",
      "Iteration 332, loss = 0.43907657\n",
      "Iteration 333, loss = 0.43531722\n",
      "Iteration 334, loss = 0.43214278\n",
      "Iteration 335, loss = 0.43607646\n",
      "Iteration 336, loss = 0.43740894\n",
      "Iteration 337, loss = 0.43166378\n",
      "Iteration 338, loss = 0.42787242\n",
      "Iteration 339, loss = 0.43591260\n",
      "Iteration 340, loss = 0.42945665\n",
      "Iteration 341, loss = 0.43077669\n",
      "Iteration 342, loss = 0.42802585\n",
      "Iteration 343, loss = 0.42838057\n",
      "Iteration 344, loss = 0.42865914\n",
      "Iteration 345, loss = 0.42616844\n",
      "Iteration 346, loss = 0.42516165\n",
      "Iteration 347, loss = 0.42948517\n",
      "Iteration 348, loss = 0.42680187\n",
      "Iteration 349, loss = 0.42859280\n",
      "Iteration 350, loss = 0.41995159\n",
      "Iteration 351, loss = 0.42974364\n",
      "Iteration 352, loss = 0.42487282\n",
      "Iteration 353, loss = 0.43213559\n",
      "Iteration 354, loss = 0.42072833\n",
      "Iteration 355, loss = 0.42208327\n",
      "Iteration 356, loss = 0.42733417\n",
      "Iteration 357, loss = 0.42686459\n",
      "Iteration 358, loss = 0.42086881\n",
      "Iteration 359, loss = 0.42267209\n",
      "Iteration 360, loss = 0.42150622\n",
      "Iteration 361, loss = 0.41741322\n",
      "Iteration 362, loss = 0.42106364\n",
      "Iteration 363, loss = 0.42131990\n",
      "Iteration 364, loss = 0.41890401\n",
      "Iteration 365, loss = 0.42248255\n",
      "Iteration 366, loss = 0.41683390\n",
      "Iteration 367, loss = 0.41609291\n",
      "Iteration 368, loss = 0.41779254\n",
      "Iteration 369, loss = 0.41221712\n",
      "Iteration 370, loss = 0.41604151\n",
      "Iteration 371, loss = 0.41965509\n",
      "Iteration 372, loss = 0.42641672\n",
      "Iteration 373, loss = 0.41383502\n",
      "Iteration 374, loss = 0.42100160\n",
      "Iteration 375, loss = 0.41947560\n",
      "Iteration 376, loss = 0.41970059\n",
      "Iteration 377, loss = 0.41906675\n",
      "Iteration 378, loss = 0.41735626\n",
      "Iteration 379, loss = 0.41693168\n",
      "Iteration 380, loss = 0.41758527\n",
      "Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
      "2.9853550977325836\n",
      "Iteration 1, loss = 17.41392291\n",
      "Iteration 2, loss = 4.90436514\n",
      "Iteration 3, loss = 4.06984447\n",
      "Iteration 4, loss = 3.56133207\n",
      "Iteration 5, loss = 3.24936134\n",
      "Iteration 6, loss = 3.04125828\n",
      "Iteration 7, loss = 2.91080482\n",
      "Iteration 8, loss = 2.76650101\n",
      "Iteration 9, loss = 2.64192046\n",
      "Iteration 10, loss = 2.53382918\n",
      "Iteration 11, loss = 2.42528036\n",
      "Iteration 12, loss = 2.31193564\n",
      "Iteration 13, loss = 2.21411972\n",
      "Iteration 14, loss = 2.09466838\n",
      "Iteration 15, loss = 1.98396174\n",
      "Iteration 16, loss = 1.90779075\n",
      "Iteration 17, loss = 1.81630973\n",
      "Iteration 18, loss = 1.73848336\n",
      "Iteration 19, loss = 1.67766939\n",
      "Iteration 20, loss = 1.61934465\n",
      "Iteration 21, loss = 1.56525506\n",
      "Iteration 22, loss = 1.51386580\n",
      "Iteration 23, loss = 1.47970885\n",
      "Iteration 24, loss = 1.44821350\n",
      "Iteration 25, loss = 1.39541938\n",
      "Iteration 26, loss = 1.36732074\n",
      "Iteration 27, loss = 1.33603885\n",
      "Iteration 28, loss = 1.31305590\n",
      "Iteration 29, loss = 1.28022347\n",
      "Iteration 30, loss = 1.25095438\n",
      "Iteration 31, loss = 1.23941889\n",
      "Iteration 32, loss = 1.21532691\n",
      "Iteration 33, loss = 1.19396710\n",
      "Iteration 34, loss = 1.17386791\n",
      "Iteration 35, loss = 1.15584827\n",
      "Iteration 36, loss = 1.13515763\n",
      "Iteration 37, loss = 1.12696641\n",
      "Iteration 38, loss = 1.10713136\n",
      "Iteration 39, loss = 1.09284738\n",
      "Iteration 40, loss = 1.07558967\n",
      "Iteration 41, loss = 1.07079779\n",
      "Iteration 42, loss = 1.05030766\n",
      "Iteration 43, loss = 1.03570573\n",
      "Iteration 44, loss = 1.02503665\n",
      "Iteration 45, loss = 1.01286896\n",
      "Iteration 46, loss = 1.01065418\n",
      "Iteration 47, loss = 0.99113976\n",
      "Iteration 48, loss = 0.99372367\n",
      "Iteration 49, loss = 0.97667253\n",
      "Iteration 50, loss = 0.96942446\n",
      "Iteration 51, loss = 0.96015582\n",
      "Iteration 52, loss = 0.95041231\n",
      "Iteration 53, loss = 0.93692480\n",
      "Iteration 54, loss = 0.93355724\n",
      "Iteration 55, loss = 0.92465713\n",
      "Iteration 56, loss = 0.90848664\n",
      "Iteration 57, loss = 0.90769778\n",
      "Iteration 58, loss = 0.90272092\n",
      "Iteration 59, loss = 0.89725540\n",
      "Iteration 60, loss = 0.88586093\n",
      "Iteration 61, loss = 0.88456877\n",
      "Iteration 62, loss = 0.87150827\n",
      "Iteration 63, loss = 0.87826507\n",
      "Iteration 64, loss = 0.85995932\n",
      "Iteration 65, loss = 0.85816296\n",
      "Iteration 66, loss = 0.85976866\n",
      "Iteration 67, loss = 0.84288751\n",
      "Iteration 68, loss = 0.84425901\n",
      "Iteration 69, loss = 0.83106372\n",
      "Iteration 70, loss = 0.84696648\n",
      "Iteration 71, loss = 0.82572970\n",
      "Iteration 72, loss = 0.81693037\n",
      "Iteration 73, loss = 0.81531262\n",
      "Iteration 74, loss = 0.80284116\n",
      "Iteration 75, loss = 0.80073464\n",
      "Iteration 76, loss = 0.80216700\n",
      "Iteration 77, loss = 0.79326880\n",
      "Iteration 78, loss = 0.78882826\n",
      "Iteration 79, loss = 0.77809572\n",
      "Iteration 80, loss = 0.78437288\n",
      "Iteration 81, loss = 0.77152204\n",
      "Iteration 82, loss = 0.77341695\n",
      "Iteration 83, loss = 0.76850715\n",
      "Iteration 84, loss = 0.75971183\n",
      "Iteration 85, loss = 0.76962773\n",
      "Iteration 86, loss = 0.77518460\n",
      "Iteration 87, loss = 0.77314226\n",
      "Iteration 88, loss = 0.75111757\n",
      "Iteration 89, loss = 0.74288595\n",
      "Iteration 90, loss = 0.74523253\n",
      "Iteration 91, loss = 0.73893431\n",
      "Iteration 92, loss = 0.73656610\n",
      "Iteration 93, loss = 0.72967506\n",
      "Iteration 94, loss = 0.72926606\n",
      "Iteration 95, loss = 0.72773214\n",
      "Iteration 96, loss = 0.71726574\n",
      "Iteration 97, loss = 0.71437495\n",
      "Iteration 98, loss = 0.71837240\n",
      "Iteration 99, loss = 0.71271885\n",
      "Iteration 100, loss = 0.73493012\n",
      "Iteration 101, loss = 0.70689847\n",
      "Iteration 102, loss = 0.70133784\n",
      "Iteration 103, loss = 0.70827625\n",
      "Iteration 104, loss = 0.69435649\n",
      "Iteration 105, loss = 0.69561410\n",
      "Iteration 106, loss = 0.69557021\n",
      "Iteration 107, loss = 0.68852099\n",
      "Iteration 108, loss = 0.68324815\n",
      "Iteration 109, loss = 0.69028638\n",
      "Iteration 110, loss = 0.68083012\n",
      "Iteration 111, loss = 0.68437478\n",
      "Iteration 112, loss = 0.67470594\n",
      "Iteration 113, loss = 0.67498128\n",
      "Iteration 114, loss = 0.66568072\n",
      "Iteration 115, loss = 0.67136830\n",
      "Iteration 116, loss = 0.67085886\n",
      "Iteration 117, loss = 0.66732904\n",
      "Iteration 118, loss = 0.66426217\n",
      "Iteration 119, loss = 0.66324873\n",
      "Iteration 120, loss = 0.66350348\n",
      "Iteration 121, loss = 0.65865281\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iteration 122, loss = 0.65655847\n",
      "Iteration 123, loss = 0.65329040\n",
      "Iteration 124, loss = 0.65199649\n",
      "Iteration 125, loss = 0.65533203\n",
      "Iteration 126, loss = 0.64728672\n",
      "Iteration 127, loss = 0.64393480\n",
      "Iteration 128, loss = 0.64824347\n",
      "Iteration 129, loss = 0.64025003\n",
      "Iteration 130, loss = 0.63445065\n",
      "Iteration 131, loss = 0.63278697\n",
      "Iteration 132, loss = 0.63283111\n",
      "Iteration 133, loss = 0.63733238\n",
      "Iteration 134, loss = 0.63051877\n",
      "Iteration 135, loss = 0.62825117\n",
      "Iteration 136, loss = 0.62032389\n",
      "Iteration 137, loss = 0.61970965\n",
      "Iteration 138, loss = 0.62474071\n",
      "Iteration 139, loss = 0.62619370\n",
      "Iteration 140, loss = 0.64250617\n",
      "Iteration 141, loss = 0.62359827\n",
      "Iteration 142, loss = 0.61219407\n",
      "Iteration 143, loss = 0.61735365\n",
      "Iteration 144, loss = 0.61210862\n",
      "Iteration 145, loss = 0.60949162\n",
      "Iteration 146, loss = 0.60610640\n",
      "Iteration 147, loss = 0.61155888\n",
      "Iteration 148, loss = 0.60695150\n",
      "Iteration 149, loss = 0.60747476\n",
      "Iteration 150, loss = 0.60546215\n",
      "Iteration 151, loss = 0.60347936\n",
      "Iteration 152, loss = 0.59864138\n",
      "Iteration 153, loss = 0.59549392\n",
      "Iteration 154, loss = 0.59238053\n",
      "Iteration 155, loss = 0.59217301\n",
      "Iteration 156, loss = 0.58625765\n",
      "Iteration 157, loss = 0.60289683\n",
      "Iteration 158, loss = 0.59200197\n",
      "Iteration 159, loss = 0.58936830\n",
      "Iteration 160, loss = 0.59367157\n",
      "Iteration 161, loss = 0.58260176\n",
      "Iteration 162, loss = 0.58137313\n",
      "Iteration 163, loss = 0.58406316\n",
      "Iteration 164, loss = 0.58222768\n",
      "Iteration 165, loss = 0.57811271\n",
      "Iteration 166, loss = 0.58847957\n",
      "Iteration 167, loss = 0.58584287\n",
      "Iteration 168, loss = 0.57989964\n",
      "Iteration 169, loss = 0.58361921\n",
      "Iteration 170, loss = 0.57135349\n",
      "Iteration 171, loss = 0.57348616\n",
      "Iteration 172, loss = 0.56868749\n",
      "Iteration 173, loss = 0.56695683\n",
      "Iteration 174, loss = 0.57841640\n",
      "Iteration 175, loss = 0.56875782\n",
      "Iteration 176, loss = 0.56169427\n",
      "Iteration 177, loss = 0.56339814\n",
      "Iteration 178, loss = 0.56369162\n",
      "Iteration 179, loss = 0.56603451\n",
      "Iteration 180, loss = 0.56352260\n",
      "Iteration 181, loss = 0.56059759\n",
      "Iteration 182, loss = 0.55434387\n",
      "Iteration 183, loss = 0.56675954\n",
      "Iteration 184, loss = 0.55367606\n",
      "Iteration 185, loss = 0.55183399\n",
      "Iteration 186, loss = 0.55601779\n",
      "Iteration 187, loss = 0.54980777\n",
      "Iteration 188, loss = 0.54732644\n",
      "Iteration 189, loss = 0.55354555\n",
      "Iteration 190, loss = 0.54780155\n",
      "Iteration 191, loss = 0.54676308\n",
      "Iteration 192, loss = 0.54406327\n",
      "Iteration 193, loss = 0.55415568\n",
      "Iteration 194, loss = 0.54784016\n",
      "Iteration 195, loss = 0.54286853\n",
      "Iteration 196, loss = 0.54219749\n",
      "Iteration 197, loss = 0.54399913\n",
      "Iteration 198, loss = 0.54012434\n",
      "Iteration 199, loss = 0.54301528\n",
      "Iteration 200, loss = 0.54408419\n",
      "Iteration 201, loss = 0.54465611\n",
      "Iteration 202, loss = 0.53798211\n",
      "Iteration 203, loss = 0.54073271\n",
      "Iteration 204, loss = 0.53251996\n",
      "Iteration 205, loss = 0.53538845\n",
      "Iteration 206, loss = 0.53703065\n",
      "Iteration 207, loss = 0.53286495\n",
      "Iteration 208, loss = 0.53704135\n",
      "Iteration 209, loss = 0.53192421\n",
      "Iteration 210, loss = 0.52836804\n",
      "Iteration 211, loss = 0.52858805\n",
      "Iteration 212, loss = 0.52484019\n",
      "Iteration 213, loss = 0.51999882\n",
      "Iteration 214, loss = 0.53117809\n",
      "Iteration 215, loss = 0.52803273\n",
      "Iteration 216, loss = 0.52447894\n",
      "Iteration 217, loss = 0.52576112\n",
      "Iteration 218, loss = 0.52294078\n",
      "Iteration 219, loss = 0.52240523\n",
      "Iteration 220, loss = 0.52308388\n",
      "Iteration 221, loss = 0.51383962\n",
      "Iteration 222, loss = 0.52027428\n",
      "Iteration 223, loss = 0.52350111\n",
      "Iteration 224, loss = 0.51827697\n",
      "Iteration 225, loss = 0.51304549\n",
      "Iteration 226, loss = 0.51256155\n",
      "Iteration 227, loss = 0.52375623\n",
      "Iteration 228, loss = 0.51299502\n",
      "Iteration 229, loss = 0.50734234\n",
      "Iteration 230, loss = 0.51054531\n",
      "Iteration 231, loss = 0.51124996\n",
      "Iteration 232, loss = 0.50910516\n",
      "Iteration 233, loss = 0.50967115\n",
      "Iteration 234, loss = 0.51212600\n",
      "Iteration 235, loss = 0.51280560\n",
      "Iteration 236, loss = 0.49759202\n",
      "Iteration 237, loss = 0.50579337\n",
      "Iteration 238, loss = 0.50276015\n",
      "Iteration 239, loss = 0.50633133\n",
      "Iteration 240, loss = 0.50750987\n",
      "Iteration 241, loss = 0.50445410\n",
      "Iteration 242, loss = 0.50028914\n",
      "Iteration 243, loss = 0.49887464\n",
      "Iteration 244, loss = 0.50012892\n",
      "Iteration 245, loss = 0.49658769\n",
      "Iteration 246, loss = 0.50101670\n",
      "Iteration 247, loss = 0.49562485\n",
      "Iteration 248, loss = 0.49978871\n",
      "Iteration 249, loss = 0.50313707\n",
      "Iteration 250, loss = 0.49964462\n",
      "Iteration 251, loss = 0.49541320\n",
      "Iteration 252, loss = 0.49048025\n",
      "Iteration 253, loss = 0.49231958\n",
      "Iteration 254, loss = 0.49488931\n",
      "Iteration 255, loss = 0.49583419\n",
      "Iteration 256, loss = 0.48866705\n",
      "Iteration 257, loss = 0.48532694\n",
      "Iteration 258, loss = 0.48106000\n",
      "Iteration 259, loss = 0.48788996\n",
      "Iteration 260, loss = 0.48895207\n",
      "Iteration 261, loss = 0.49277511\n",
      "Iteration 262, loss = 0.48918557\n",
      "Iteration 263, loss = 0.48472903\n",
      "Iteration 264, loss = 0.49695086\n",
      "Iteration 265, loss = 0.48073117\n",
      "Iteration 266, loss = 0.48193109\n",
      "Iteration 267, loss = 0.48730495\n",
      "Iteration 268, loss = 0.48318519\n",
      "Iteration 269, loss = 0.48791287\n",
      "Iteration 270, loss = 0.48099600\n",
      "Iteration 271, loss = 0.47442914\n",
      "Iteration 272, loss = 0.48115153\n",
      "Iteration 273, loss = 0.47967241\n",
      "Iteration 274, loss = 0.48113711\n",
      "Iteration 275, loss = 0.47593992\n",
      "Iteration 276, loss = 0.47249967\n",
      "Iteration 277, loss = 0.47849557\n",
      "Iteration 278, loss = 0.48265360\n",
      "Iteration 279, loss = 0.48047806\n",
      "Iteration 280, loss = 0.47098556\n",
      "Iteration 281, loss = 0.47223857\n",
      "Iteration 282, loss = 0.47653964\n",
      "Iteration 283, loss = 0.47483715\n",
      "Iteration 284, loss = 0.46827367\n",
      "Iteration 285, loss = 0.47045803\n",
      "Iteration 286, loss = 0.47224442\n",
      "Iteration 287, loss = 0.46745251\n",
      "Iteration 288, loss = 0.46892284\n",
      "Iteration 289, loss = 0.47273871\n",
      "Iteration 290, loss = 0.46899166\n",
      "Iteration 291, loss = 0.46684310\n",
      "Iteration 292, loss = 0.46773426\n",
      "Iteration 293, loss = 0.47386961\n",
      "Iteration 294, loss = 0.46450862\n",
      "Iteration 295, loss = 0.46450606\n",
      "Iteration 296, loss = 0.46468343\n",
      "Iteration 297, loss = 0.47048647\n",
      "Iteration 298, loss = 0.46886511\n",
      "Iteration 299, loss = 0.47134239\n",
      "Iteration 300, loss = 0.46330211\n",
      "Iteration 301, loss = 0.46399953\n",
      "Iteration 302, loss = 0.46488893\n",
      "Iteration 303, loss = 0.46564310\n",
      "Iteration 304, loss = 0.46612063\n",
      "Iteration 305, loss = 0.46125407\n",
      "Iteration 306, loss = 0.46813136\n",
      "Iteration 307, loss = 0.45960937\n",
      "Iteration 308, loss = 0.46355543\n",
      "Iteration 309, loss = 0.46140531\n",
      "Iteration 310, loss = 0.45162760\n",
      "Iteration 311, loss = 0.45983654\n",
      "Iteration 312, loss = 0.45787828\n",
      "Iteration 313, loss = 0.45804640\n",
      "Iteration 314, loss = 0.45591379\n",
      "Iteration 315, loss = 0.45327077\n",
      "Iteration 316, loss = 0.45441077\n",
      "Iteration 317, loss = 0.46126791\n",
      "Iteration 318, loss = 0.45342015\n",
      "Iteration 319, loss = 0.46578617\n",
      "Iteration 320, loss = 0.44932613\n",
      "Iteration 321, loss = 0.45342688\n",
      "Iteration 322, loss = 0.45771326\n",
      "Iteration 323, loss = 0.44859950\n",
      "Iteration 324, loss = 0.45871477\n",
      "Iteration 325, loss = 0.45134458\n",
      "Iteration 326, loss = 0.44696332\n",
      "Iteration 327, loss = 0.45182972\n",
      "Iteration 328, loss = 0.45656636\n",
      "Iteration 329, loss = 0.47712245\n",
      "Iteration 330, loss = 0.44493526\n",
      "Iteration 331, loss = 0.44814908\n",
      "Iteration 332, loss = 0.46079327\n",
      "Iteration 333, loss = 0.44591877\n",
      "Iteration 334, loss = 0.44895435\n",
      "Iteration 335, loss = 0.44564571\n",
      "Iteration 336, loss = 0.45786318\n",
      "Iteration 337, loss = 0.44484937\n",
      "Iteration 338, loss = 0.44432998\n",
      "Iteration 339, loss = 0.44397392\n",
      "Iteration 340, loss = 0.44073320\n",
      "Iteration 341, loss = 0.44387120\n",
      "Iteration 342, loss = 0.44234013\n",
      "Iteration 343, loss = 0.44829802\n",
      "Iteration 344, loss = 0.43977183\n",
      "Iteration 345, loss = 0.43854443\n",
      "Iteration 346, loss = 0.44517932\n",
      "Iteration 347, loss = 0.43847295\n",
      "Iteration 348, loss = 0.43842985\n",
      "Iteration 349, loss = 0.43665275\n",
      "Iteration 350, loss = 0.43568764\n",
      "Iteration 351, loss = 0.43698296\n",
      "Iteration 352, loss = 0.44037325\n",
      "Iteration 353, loss = 0.44737674\n",
      "Iteration 354, loss = 0.43990065\n",
      "Iteration 355, loss = 0.43835936\n",
      "Iteration 356, loss = 0.44225098\n",
      "Iteration 357, loss = 0.43378426\n",
      "Iteration 358, loss = 0.43657282\n",
      "Iteration 359, loss = 0.43453140\n",
      "Iteration 360, loss = 0.43596371\n",
      "Iteration 361, loss = 0.42925989\n",
      "Iteration 362, loss = 0.43217558\n",
      "Iteration 363, loss = 0.43238675\n",
      "Iteration 364, loss = 0.43318842\n",
      "Iteration 365, loss = 0.43413158\n",
      "Iteration 366, loss = 0.42961389\n",
      "Iteration 367, loss = 0.43249031\n",
      "Iteration 368, loss = 0.43291766\n",
      "Iteration 369, loss = 0.42860266\n",
      "Iteration 370, loss = 0.43100393\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iteration 371, loss = 0.43104759\n",
      "Iteration 372, loss = 0.43036049\n",
      "Iteration 373, loss = 0.42837342\n",
      "Iteration 374, loss = 0.42917949\n",
      "Iteration 375, loss = 0.43070779\n",
      "Iteration 376, loss = 0.42533628\n",
      "Iteration 377, loss = 0.43029900\n",
      "Iteration 378, loss = 0.42722007\n",
      "Iteration 379, loss = 0.42811904\n",
      "Iteration 380, loss = 0.43037015\n",
      "Iteration 381, loss = 0.42195584\n",
      "Iteration 382, loss = 0.43430884\n",
      "Iteration 383, loss = 0.42593671\n",
      "Iteration 384, loss = 0.42712001\n",
      "Iteration 385, loss = 0.42798146\n",
      "Iteration 386, loss = 0.43497970\n",
      "Iteration 387, loss = 0.42802480\n",
      "Iteration 388, loss = 0.42784906\n",
      "Iteration 389, loss = 0.42957872\n",
      "Iteration 390, loss = 0.42969758\n",
      "Iteration 391, loss = 0.42103103\n",
      "Iteration 392, loss = 0.42374005\n",
      "Iteration 393, loss = 0.41993238\n",
      "Iteration 394, loss = 0.42011080\n",
      "Iteration 395, loss = 0.42961812\n",
      "Iteration 396, loss = 0.42912626\n",
      "Iteration 397, loss = 0.42301137\n",
      "Iteration 398, loss = 0.42706524\n",
      "Iteration 399, loss = 0.41604078\n",
      "Iteration 400, loss = 0.41543248\n",
      "Iteration 401, loss = 0.42516526\n",
      "Iteration 402, loss = 0.41844843\n",
      "Iteration 403, loss = 0.42316293\n",
      "Iteration 404, loss = 0.41764255\n",
      "Iteration 405, loss = 0.42339603\n",
      "Iteration 406, loss = 0.42400283\n",
      "Iteration 407, loss = 0.42011684\n",
      "Iteration 408, loss = 0.41748250\n",
      "Iteration 409, loss = 0.41792971\n",
      "Iteration 410, loss = 0.41477625\n",
      "Iteration 411, loss = 0.41605286\n",
      "Iteration 412, loss = 0.41435716\n",
      "Iteration 413, loss = 0.42673628\n",
      "Iteration 414, loss = 0.41086637\n",
      "Iteration 415, loss = 0.41392228\n",
      "Iteration 416, loss = 0.41721237\n",
      "Iteration 417, loss = 0.42468446\n",
      "Iteration 418, loss = 0.41351808\n",
      "Iteration 419, loss = 0.41038129\n",
      "Iteration 420, loss = 0.41203174\n",
      "Iteration 421, loss = 0.41323645\n",
      "Iteration 422, loss = 0.41418195\n",
      "Iteration 423, loss = 0.41875709\n",
      "Iteration 424, loss = 0.41550829\n",
      "Iteration 425, loss = 0.40847579\n",
      "Iteration 426, loss = 0.40616824\n",
      "Iteration 427, loss = 0.41224045\n",
      "Iteration 428, loss = 0.48891139\n",
      "Iteration 429, loss = 0.42594185\n",
      "Iteration 430, loss = 0.41510813\n",
      "Iteration 431, loss = 0.42184764\n",
      "Iteration 432, loss = 0.41057131\n",
      "Iteration 433, loss = 0.41752624\n",
      "Iteration 434, loss = 0.41721350\n",
      "Iteration 435, loss = 0.41599586\n",
      "Iteration 436, loss = 0.40767862\n",
      "Iteration 437, loss = 0.40447847\n",
      "Iteration 438, loss = 0.41873428\n",
      "Iteration 439, loss = 0.40813889\n",
      "Iteration 440, loss = 0.40988762\n",
      "Iteration 441, loss = 0.41447925\n",
      "Iteration 442, loss = 0.41348288\n",
      "Iteration 443, loss = 0.41114124\n",
      "Iteration 444, loss = 0.41199099\n",
      "Iteration 445, loss = 0.40398163\n",
      "Iteration 446, loss = 0.41242553\n",
      "Iteration 447, loss = 0.41037800\n",
      "Iteration 448, loss = 0.40799731\n",
      "Iteration 449, loss = 0.40813598\n",
      "Iteration 450, loss = 0.40525685\n",
      "Iteration 451, loss = 0.41123402\n",
      "Iteration 452, loss = 0.40719764\n",
      "Iteration 453, loss = 0.40558871\n",
      "Iteration 454, loss = 0.40478210\n",
      "Iteration 455, loss = 0.39857943\n",
      "Iteration 456, loss = 0.40077902\n",
      "Iteration 457, loss = 0.41436750\n",
      "Iteration 458, loss = 0.40704900\n",
      "Iteration 459, loss = 0.40068626\n",
      "Iteration 460, loss = 0.40179999\n",
      "Iteration 461, loss = 0.40244830\n",
      "Iteration 462, loss = 0.41313623\n",
      "Iteration 463, loss = 0.40008928\n",
      "Iteration 464, loss = 0.40871503\n",
      "Iteration 465, loss = 0.39852562\n",
      "Iteration 466, loss = 0.39781222\n",
      "Iteration 467, loss = 0.40219014\n",
      "Iteration 468, loss = 0.40199849\n",
      "Iteration 469, loss = 0.40133484\n",
      "Iteration 470, loss = 0.40108599\n",
      "Iteration 471, loss = 0.40146141\n",
      "Iteration 472, loss = 0.40281692\n",
      "Iteration 473, loss = 0.40471289\n",
      "Iteration 474, loss = 0.40164340\n",
      "Iteration 475, loss = 0.40058171\n",
      "Iteration 476, loss = 0.40037956\n",
      "Iteration 477, loss = 0.40482260\n",
      "Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
      "2.183582419932365\n",
      "Iteration 1, loss = 17.42883374\n",
      "Iteration 2, loss = 4.88278085\n",
      "Iteration 3, loss = 4.04852912\n",
      "Iteration 4, loss = 3.52657554\n",
      "Iteration 5, loss = 3.21213953\n",
      "Iteration 6, loss = 2.99863031\n",
      "Iteration 7, loss = 2.85764551\n",
      "Iteration 8, loss = 2.72039270\n",
      "Iteration 9, loss = 2.59419850\n",
      "Iteration 10, loss = 2.49035403\n",
      "Iteration 11, loss = 2.37706236\n",
      "Iteration 12, loss = 2.26524743\n",
      "Iteration 13, loss = 2.17147374\n",
      "Iteration 14, loss = 2.05811265\n",
      "Iteration 15, loss = 1.95229595\n",
      "Iteration 16, loss = 1.87914340\n",
      "Iteration 17, loss = 1.78962446\n",
      "Iteration 18, loss = 1.71125098\n",
      "Iteration 19, loss = 1.64998810\n",
      "Iteration 20, loss = 1.58901821\n",
      "Iteration 21, loss = 1.54205515\n",
      "Iteration 22, loss = 1.49205272\n",
      "Iteration 23, loss = 1.45128035\n",
      "Iteration 24, loss = 1.41926651\n",
      "Iteration 25, loss = 1.37362550\n",
      "Iteration 26, loss = 1.34781917\n",
      "Iteration 27, loss = 1.32012515\n",
      "Iteration 28, loss = 1.29067386\n",
      "Iteration 29, loss = 1.26248410\n",
      "Iteration 30, loss = 1.23450080\n",
      "Iteration 31, loss = 1.21894968\n",
      "Iteration 32, loss = 1.19597339\n",
      "Iteration 33, loss = 1.18130134\n",
      "Iteration 34, loss = 1.15645864\n",
      "Iteration 35, loss = 1.13873620\n",
      "Iteration 36, loss = 1.12183415\n",
      "Iteration 37, loss = 1.11088120\n",
      "Iteration 38, loss = 1.09577014\n",
      "Iteration 39, loss = 1.08128713\n",
      "Iteration 40, loss = 1.06770185\n",
      "Iteration 41, loss = 1.05701830\n",
      "Iteration 42, loss = 1.04483222\n",
      "Iteration 43, loss = 1.03059036\n",
      "Iteration 44, loss = 1.01659527\n",
      "Iteration 45, loss = 1.00674896\n",
      "Iteration 46, loss = 1.00218295\n",
      "Iteration 47, loss = 0.98074388\n",
      "Iteration 48, loss = 0.99138334\n",
      "Iteration 49, loss = 0.96882235\n",
      "Iteration 50, loss = 0.96695559\n",
      "Iteration 51, loss = 0.95321298\n",
      "Iteration 52, loss = 0.94345171\n",
      "Iteration 53, loss = 0.93348526\n",
      "Iteration 54, loss = 0.93116669\n",
      "Iteration 55, loss = 0.91873241\n",
      "Iteration 56, loss = 0.90751720\n",
      "Iteration 57, loss = 0.90252496\n",
      "Iteration 58, loss = 0.89074976\n",
      "Iteration 59, loss = 0.89684625\n",
      "Iteration 60, loss = 0.88493762\n",
      "Iteration 61, loss = 0.87388909\n",
      "Iteration 62, loss = 0.86919489\n",
      "Iteration 63, loss = 0.87116804\n",
      "Iteration 64, loss = 0.85674296\n",
      "Iteration 65, loss = 0.85632497\n",
      "Iteration 66, loss = 0.86335646\n",
      "Iteration 67, loss = 0.84328422\n",
      "Iteration 68, loss = 0.83559775\n",
      "Iteration 69, loss = 0.82895504\n",
      "Iteration 70, loss = 0.83768417\n",
      "Iteration 71, loss = 0.82212346\n",
      "Iteration 72, loss = 0.81657365\n",
      "Iteration 73, loss = 0.81022819\n",
      "Iteration 74, loss = 0.80592060\n",
      "Iteration 75, loss = 0.79661157\n",
      "Iteration 76, loss = 0.79553350\n",
      "Iteration 77, loss = 0.78864993\n",
      "Iteration 78, loss = 0.78812766\n",
      "Iteration 79, loss = 0.77754237\n",
      "Iteration 80, loss = 0.78153731\n",
      "Iteration 81, loss = 0.76982011\n",
      "Iteration 82, loss = 0.77174761\n",
      "Iteration 83, loss = 0.76764986\n",
      "Iteration 84, loss = 0.75847212\n",
      "Iteration 85, loss = 0.76031922\n",
      "Iteration 86, loss = 0.76918538\n",
      "Iteration 87, loss = 0.77043922\n",
      "Iteration 88, loss = 0.74699220\n",
      "Iteration 89, loss = 0.73945628\n",
      "Iteration 90, loss = 0.73951867\n",
      "Iteration 91, loss = 0.73784744\n",
      "Iteration 92, loss = 0.73585213\n",
      "Iteration 93, loss = 0.72944798\n",
      "Iteration 94, loss = 0.72659818\n",
      "Iteration 95, loss = 0.72965212\n",
      "Iteration 96, loss = 0.71843824\n",
      "Iteration 97, loss = 0.70996581\n",
      "Iteration 98, loss = 0.71612045\n",
      "Iteration 99, loss = 0.70514771\n",
      "Iteration 100, loss = 0.73583479\n",
      "Iteration 101, loss = 0.70505216\n",
      "Iteration 102, loss = 0.70540101\n",
      "Iteration 103, loss = 0.70409377\n",
      "Iteration 104, loss = 0.69226115\n",
      "Iteration 105, loss = 0.68632512\n",
      "Iteration 106, loss = 0.69357040\n",
      "Iteration 107, loss = 0.68419391\n",
      "Iteration 108, loss = 0.67756504\n",
      "Iteration 109, loss = 0.68737614\n",
      "Iteration 110, loss = 0.67941150\n",
      "Iteration 111, loss = 0.68647713\n",
      "Iteration 112, loss = 0.67341605\n",
      "Iteration 113, loss = 0.67903750\n",
      "Iteration 114, loss = 0.66224276\n",
      "Iteration 115, loss = 0.66346775\n",
      "Iteration 116, loss = 0.67555314\n",
      "Iteration 117, loss = 0.66591613\n",
      "Iteration 118, loss = 0.66135837\n",
      "Iteration 119, loss = 0.65350136\n",
      "Iteration 120, loss = 0.66247263\n",
      "Iteration 121, loss = 0.65233197\n",
      "Iteration 122, loss = 0.64687677\n",
      "Iteration 123, loss = 0.64460542\n",
      "Iteration 124, loss = 0.65008526\n",
      "Iteration 125, loss = 0.65001307\n",
      "Iteration 126, loss = 0.63680556\n",
      "Iteration 127, loss = 0.63534098\n",
      "Iteration 128, loss = 0.64475473\n",
      "Iteration 129, loss = 0.63303929\n",
      "Iteration 130, loss = 0.62791018\n",
      "Iteration 131, loss = 0.62920468\n",
      "Iteration 132, loss = 0.62767434\n",
      "Iteration 133, loss = 0.63201965\n",
      "Iteration 134, loss = 0.63293718\n",
      "Iteration 135, loss = 0.62716965\n",
      "Iteration 136, loss = 0.61248064\n",
      "Iteration 137, loss = 0.61691857\n",
      "Iteration 138, loss = 0.62044265\n",
      "Iteration 139, loss = 0.61084295\n",
      "Iteration 140, loss = 0.61777839\n",
      "Iteration 141, loss = 0.61407207\n",
      "Iteration 142, loss = 0.60488644\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iteration 143, loss = 0.60819029\n",
      "Iteration 144, loss = 0.60605333\n",
      "Iteration 145, loss = 0.60090154\n",
      "Iteration 146, loss = 0.60151812\n",
      "Iteration 147, loss = 0.60374150\n",
      "Iteration 148, loss = 0.60725252\n",
      "Iteration 149, loss = 0.60969982\n",
      "Iteration 150, loss = 0.59861556\n",
      "Iteration 151, loss = 0.59674106\n",
      "Iteration 152, loss = 0.59031796\n",
      "Iteration 153, loss = 0.58838119\n",
      "Iteration 154, loss = 0.58956636\n",
      "Iteration 155, loss = 0.58800973\n",
      "Iteration 156, loss = 0.57686772\n",
      "Iteration 157, loss = 0.58950684\n",
      "Iteration 158, loss = 0.58216143\n",
      "Iteration 159, loss = 0.58251950\n",
      "Iteration 160, loss = 0.57929152\n",
      "Iteration 161, loss = 0.57621686\n",
      "Iteration 162, loss = 0.57278696\n",
      "Iteration 163, loss = 0.57770641\n",
      "Iteration 164, loss = 0.57672702\n",
      "Iteration 165, loss = 0.56873381\n",
      "Iteration 166, loss = 0.58915948\n",
      "Iteration 167, loss = 0.57351080\n",
      "Iteration 168, loss = 0.56798947\n",
      "Iteration 169, loss = 0.58312482\n",
      "Iteration 170, loss = 0.56311618\n",
      "Iteration 171, loss = 0.56551526\n",
      "Iteration 172, loss = 0.57248223\n",
      "Iteration 173, loss = 0.56089260\n",
      "Iteration 174, loss = 0.56952152\n",
      "Iteration 175, loss = 0.56458859\n",
      "Iteration 176, loss = 0.55672563\n",
      "Iteration 177, loss = 0.56274134\n",
      "Iteration 178, loss = 0.55657432\n",
      "Iteration 179, loss = 0.55751819\n",
      "Iteration 180, loss = 0.55678789\n",
      "Iteration 181, loss = 0.55000946\n",
      "Iteration 182, loss = 0.55494070\n",
      "Iteration 183, loss = 0.55827452\n",
      "Iteration 184, loss = 0.55323153\n",
      "Iteration 185, loss = 0.54953518\n",
      "Iteration 186, loss = 0.55210862\n",
      "Iteration 187, loss = 0.54517762\n",
      "Iteration 188, loss = 0.54336549\n",
      "Iteration 189, loss = 0.54310290\n",
      "Iteration 190, loss = 0.53678441\n",
      "Iteration 191, loss = 0.54127726\n",
      "Iteration 192, loss = 0.53782256\n",
      "Iteration 193, loss = 0.54507741\n",
      "Iteration 194, loss = 0.54361887\n",
      "Iteration 195, loss = 0.53659155\n",
      "Iteration 196, loss = 0.53759295\n",
      "Iteration 197, loss = 0.53889925\n",
      "Iteration 198, loss = 0.53419128\n",
      "Iteration 199, loss = 0.53563268\n",
      "Iteration 200, loss = 0.53472631\n",
      "Iteration 201, loss = 0.53765308\n",
      "Iteration 202, loss = 0.52959658\n",
      "Iteration 203, loss = 0.53019922\n",
      "Iteration 204, loss = 0.52620881\n",
      "Iteration 205, loss = 0.52688915\n",
      "Iteration 206, loss = 0.52894224\n",
      "Iteration 207, loss = 0.52832880\n",
      "Iteration 208, loss = 0.52320742\n",
      "Iteration 209, loss = 0.52479979\n",
      "Iteration 210, loss = 0.52555198\n",
      "Iteration 211, loss = 0.52205053\n",
      "Iteration 212, loss = 0.52094435\n",
      "Iteration 213, loss = 0.51516557\n",
      "Iteration 214, loss = 0.52631062\n",
      "Iteration 215, loss = 0.52037978\n",
      "Iteration 216, loss = 0.51857248\n",
      "Iteration 217, loss = 0.51824098\n",
      "Iteration 218, loss = 0.52052491\n",
      "Iteration 219, loss = 0.51280020\n",
      "Iteration 220, loss = 0.52081576\n",
      "Iteration 221, loss = 0.51137120\n",
      "Iteration 222, loss = 0.51789975\n",
      "Iteration 223, loss = 0.52230167\n",
      "Iteration 224, loss = 0.51012303\n",
      "Iteration 225, loss = 0.50701072\n",
      "Iteration 226, loss = 0.51502771\n",
      "Iteration 227, loss = 0.52368235\n",
      "Iteration 228, loss = 0.50827395\n",
      "Iteration 229, loss = 0.50363367\n",
      "Iteration 230, loss = 0.50481697\n",
      "Iteration 231, loss = 0.50657157\n",
      "Iteration 232, loss = 0.50392147\n",
      "Iteration 233, loss = 0.50404854\n",
      "Iteration 234, loss = 0.50224794\n",
      "Iteration 235, loss = 0.50335075\n",
      "Iteration 236, loss = 0.49759013\n",
      "Iteration 237, loss = 0.50154571\n",
      "Iteration 238, loss = 0.49845647\n",
      "Iteration 239, loss = 0.50061080\n",
      "Iteration 240, loss = 0.50086024\n",
      "Iteration 241, loss = 0.50118064\n",
      "Iteration 242, loss = 0.49044152\n",
      "Iteration 243, loss = 0.48910066\n",
      "Iteration 244, loss = 0.49756518\n",
      "Iteration 245, loss = 0.49385187\n",
      "Iteration 246, loss = 0.49564671\n",
      "Iteration 247, loss = 0.48622642\n",
      "Iteration 248, loss = 0.49612328\n",
      "Iteration 249, loss = 0.50178786\n",
      "Iteration 250, loss = 0.49806724\n",
      "Iteration 251, loss = 0.49384387\n",
      "Iteration 252, loss = 0.48433074\n",
      "Iteration 253, loss = 0.48772752\n",
      "Iteration 254, loss = 0.48362931\n",
      "Iteration 255, loss = 0.48465377\n",
      "Iteration 256, loss = 0.48407067\n",
      "Iteration 257, loss = 0.48030673\n",
      "Iteration 258, loss = 0.47479037\n",
      "Iteration 259, loss = 0.48361759\n",
      "Iteration 260, loss = 0.48930988\n",
      "Iteration 261, loss = 0.48892188\n",
      "Iteration 262, loss = 0.48250194\n",
      "Iteration 263, loss = 0.48199745\n",
      "Iteration 264, loss = 0.49026309\n",
      "Iteration 265, loss = 0.48018481\n",
      "Iteration 266, loss = 0.48061737\n",
      "Iteration 267, loss = 0.47738267\n",
      "Iteration 268, loss = 0.47878453\n",
      "Iteration 269, loss = 0.48211128\n",
      "Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
      "2.769229222239621\n",
      "Iteration 1, loss = 17.48493978\n",
      "Iteration 2, loss = 4.94971805\n",
      "Iteration 3, loss = 4.10940520\n",
      "Iteration 4, loss = 3.57904275\n",
      "Iteration 5, loss = 3.26206839\n",
      "Iteration 6, loss = 3.05297224\n",
      "Iteration 7, loss = 2.91392424\n",
      "Iteration 8, loss = 2.76960214\n",
      "Iteration 9, loss = 2.64610049\n",
      "Iteration 10, loss = 2.53212326\n",
      "Iteration 11, loss = 2.41898953\n",
      "Iteration 12, loss = 2.30896948\n",
      "Iteration 13, loss = 2.20732230\n",
      "Iteration 14, loss = 2.09074933\n",
      "Iteration 15, loss = 1.98777874\n",
      "Iteration 16, loss = 1.91062544\n",
      "Iteration 17, loss = 1.81706035\n",
      "Iteration 18, loss = 1.73951792\n",
      "Iteration 19, loss = 1.67259949\n",
      "Iteration 20, loss = 1.61554542\n",
      "Iteration 21, loss = 1.56822386\n",
      "Iteration 22, loss = 1.51539721\n",
      "Iteration 23, loss = 1.46966867\n",
      "Iteration 24, loss = 1.43005595\n",
      "Iteration 25, loss = 1.39498082\n",
      "Iteration 26, loss = 1.36242909\n",
      "Iteration 27, loss = 1.33902073\n",
      "Iteration 28, loss = 1.30985788\n",
      "Iteration 29, loss = 1.27788037\n",
      "Iteration 30, loss = 1.24712091\n",
      "Iteration 31, loss = 1.22955246\n",
      "Iteration 32, loss = 1.20918123\n",
      "Iteration 33, loss = 1.19308590\n",
      "Iteration 34, loss = 1.16971634\n",
      "Iteration 35, loss = 1.15041764\n",
      "Iteration 36, loss = 1.13478903\n",
      "Iteration 37, loss = 1.12286444\n",
      "Iteration 38, loss = 1.10927195\n",
      "Iteration 39, loss = 1.09939286\n",
      "Iteration 40, loss = 1.08201359\n",
      "Iteration 41, loss = 1.06614409\n",
      "Iteration 42, loss = 1.05332459\n",
      "Iteration 43, loss = 1.03646055\n",
      "Iteration 44, loss = 1.02817978\n",
      "Iteration 45, loss = 1.01481430\n",
      "Iteration 46, loss = 1.01752543\n",
      "Iteration 47, loss = 0.99311174\n",
      "Iteration 48, loss = 1.00167598\n",
      "Iteration 49, loss = 0.98032410\n",
      "Iteration 50, loss = 0.97776155\n",
      "Iteration 51, loss = 0.96471661\n",
      "Iteration 52, loss = 0.95875075\n",
      "Iteration 53, loss = 0.95009094\n",
      "Iteration 54, loss = 0.94189694\n",
      "Iteration 55, loss = 0.93228627\n",
      "Iteration 56, loss = 0.92015131\n",
      "Iteration 57, loss = 0.91785134\n",
      "Iteration 58, loss = 0.90337940\n",
      "Iteration 59, loss = 0.90669181\n",
      "Iteration 60, loss = 0.89884460\n",
      "Iteration 61, loss = 0.88905953\n",
      "Iteration 62, loss = 0.88691687\n",
      "Iteration 63, loss = 0.87926634\n",
      "Iteration 64, loss = 0.86986723\n",
      "Iteration 65, loss = 0.86787558\n",
      "Iteration 66, loss = 0.87302422\n",
      "Iteration 67, loss = 0.85264771\n",
      "Iteration 68, loss = 0.84920257\n",
      "Iteration 69, loss = 0.84403574\n",
      "Iteration 70, loss = 0.84001419\n",
      "Iteration 71, loss = 0.83193491\n",
      "Iteration 72, loss = 0.82958849\n",
      "Iteration 73, loss = 0.82189204\n",
      "Iteration 74, loss = 0.82118919\n",
      "Iteration 75, loss = 0.81258271\n",
      "Iteration 76, loss = 0.80624245\n",
      "Iteration 77, loss = 0.80722352\n",
      "Iteration 78, loss = 0.80530474\n",
      "Iteration 79, loss = 0.79314659\n",
      "Iteration 80, loss = 0.79690326\n",
      "Iteration 81, loss = 0.78715572\n",
      "Iteration 82, loss = 0.78934022\n",
      "Iteration 83, loss = 0.78160871\n",
      "Iteration 84, loss = 0.77400457\n",
      "Iteration 85, loss = 0.77425169\n",
      "Iteration 86, loss = 0.78324818\n",
      "Iteration 87, loss = 0.78524848\n",
      "Iteration 88, loss = 0.76208825\n",
      "Iteration 89, loss = 0.75652373\n",
      "Iteration 90, loss = 0.75840433\n",
      "Iteration 91, loss = 0.75591478\n",
      "Iteration 92, loss = 0.75565945\n",
      "Iteration 93, loss = 0.74832395\n",
      "Iteration 94, loss = 0.73851905\n",
      "Iteration 95, loss = 0.74532361\n",
      "Iteration 96, loss = 0.74188743\n",
      "Iteration 97, loss = 0.72811129\n",
      "Iteration 98, loss = 0.73029840\n",
      "Iteration 99, loss = 0.72357105\n",
      "Iteration 100, loss = 0.73685821\n",
      "Iteration 101, loss = 0.71976449\n",
      "Iteration 102, loss = 0.72088423\n",
      "Iteration 103, loss = 0.71888703\n",
      "Iteration 104, loss = 0.70943203\n",
      "Iteration 105, loss = 0.70218514\n",
      "Iteration 106, loss = 0.70916969\n",
      "Iteration 107, loss = 0.70624183\n",
      "Iteration 108, loss = 0.69552693\n",
      "Iteration 109, loss = 0.69664842\n",
      "Iteration 110, loss = 0.69016940\n",
      "Iteration 111, loss = 0.69675451\n",
      "Iteration 112, loss = 0.68668523\n",
      "Iteration 113, loss = 0.68970394\n",
      "Iteration 114, loss = 0.67746994\n",
      "Iteration 115, loss = 0.68454886\n",
      "Iteration 116, loss = 0.69540828\n",
      "Iteration 117, loss = 0.67597742\n",
      "Iteration 118, loss = 0.67030298\n",
      "Iteration 119, loss = 0.66455576\n",
      "Iteration 120, loss = 0.67070662\n",
      "Iteration 121, loss = 0.66579960\n",
      "Iteration 122, loss = 0.66421106\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iteration 123, loss = 0.65508440\n",
      "Iteration 124, loss = 0.66018485\n",
      "Iteration 125, loss = 0.66704206\n",
      "Iteration 126, loss = 0.65011213\n",
      "Iteration 127, loss = 0.64747254\n",
      "Iteration 128, loss = 0.65723402\n",
      "Iteration 129, loss = 0.64266753\n",
      "Iteration 130, loss = 0.63801303\n",
      "Iteration 131, loss = 0.63505639\n",
      "Iteration 132, loss = 0.63938517\n",
      "Iteration 133, loss = 0.63596629\n",
      "Iteration 134, loss = 0.63660047\n",
      "Iteration 135, loss = 0.63722058\n",
      "Iteration 136, loss = 0.63272245\n",
      "Iteration 137, loss = 0.62956198\n",
      "Iteration 138, loss = 0.63431416\n",
      "Iteration 139, loss = 0.62645518\n",
      "Iteration 140, loss = 0.64038767\n",
      "Iteration 141, loss = 0.62616549\n",
      "Iteration 142, loss = 0.61644994\n",
      "Iteration 143, loss = 0.61630182\n",
      "Iteration 144, loss = 0.61408230\n",
      "Iteration 145, loss = 0.61138924\n",
      "Iteration 146, loss = 0.61151107\n",
      "Iteration 147, loss = 0.61275820\n",
      "Iteration 148, loss = 0.62189798\n",
      "Iteration 149, loss = 0.63289414\n",
      "Iteration 150, loss = 0.60120345\n",
      "Iteration 151, loss = 0.61118547\n",
      "Iteration 152, loss = 0.60205389\n",
      "Iteration 153, loss = 0.59908503\n",
      "Iteration 154, loss = 0.59790132\n",
      "Iteration 155, loss = 0.59850268\n",
      "Iteration 156, loss = 0.58972333\n",
      "Iteration 157, loss = 0.59398697\n",
      "Iteration 158, loss = 0.59361674\n",
      "Iteration 159, loss = 0.58645516\n",
      "Iteration 160, loss = 0.58674102\n",
      "Iteration 161, loss = 0.58395656\n",
      "Iteration 162, loss = 0.58592781\n",
      "Iteration 163, loss = 0.59024511\n",
      "Iteration 164, loss = 0.58157216\n",
      "Iteration 165, loss = 0.58045972\n",
      "Iteration 166, loss = 0.59638773\n",
      "Iteration 167, loss = 0.57821918\n",
      "Iteration 168, loss = 0.57861393\n",
      "Iteration 169, loss = 0.58507693\n",
      "Iteration 170, loss = 0.57168871\n",
      "Iteration 171, loss = 0.57822850\n",
      "Iteration 172, loss = 0.58783918\n",
      "Iteration 173, loss = 0.56943629\n",
      "Iteration 174, loss = 0.57253788\n",
      "Iteration 175, loss = 0.57534821\n",
      "Iteration 176, loss = 0.55988060\n",
      "Iteration 177, loss = 0.57287881\n",
      "Iteration 178, loss = 0.56237955\n",
      "Iteration 179, loss = 0.56012583\n",
      "Iteration 180, loss = 0.56999002\n",
      "Iteration 181, loss = 0.57164683\n",
      "Iteration 182, loss = 0.56482802\n",
      "Iteration 183, loss = 0.56786609\n",
      "Iteration 184, loss = 0.56088246\n",
      "Iteration 185, loss = 0.55539051\n",
      "Iteration 186, loss = 0.56323353\n",
      "Iteration 187, loss = 0.55344143\n",
      "Iteration 188, loss = 0.55191726\n",
      "Iteration 189, loss = 0.54630866\n",
      "Iteration 190, loss = 0.54632547\n",
      "Iteration 191, loss = 0.54619658\n",
      "Iteration 192, loss = 0.54727774\n",
      "Iteration 193, loss = 0.54662711\n",
      "Iteration 194, loss = 0.54567124\n",
      "Iteration 195, loss = 0.54260805\n",
      "Iteration 196, loss = 0.54964807\n",
      "Iteration 197, loss = 0.54591305\n",
      "Iteration 198, loss = 0.53859596\n",
      "Iteration 199, loss = 0.54678099\n",
      "Iteration 200, loss = 0.54270489\n",
      "Iteration 201, loss = 0.53964690\n",
      "Iteration 202, loss = 0.53375752\n",
      "Iteration 203, loss = 0.53438201\n",
      "Iteration 204, loss = 0.53302246\n",
      "Iteration 205, loss = 0.53464978\n",
      "Iteration 206, loss = 0.53645317\n",
      "Iteration 207, loss = 0.53390410\n",
      "Iteration 208, loss = 0.52826436\n",
      "Iteration 209, loss = 0.53089086\n",
      "Iteration 210, loss = 0.54085342\n",
      "Iteration 211, loss = 0.52667337\n",
      "Iteration 212, loss = 0.52318676\n",
      "Iteration 213, loss = 0.51983620\n",
      "Iteration 214, loss = 0.52896430\n",
      "Iteration 215, loss = 0.51961162\n",
      "Iteration 216, loss = 0.52178876\n",
      "Iteration 217, loss = 0.52573645\n",
      "Iteration 218, loss = 0.52249314\n",
      "Iteration 219, loss = 0.51503696\n",
      "Iteration 220, loss = 0.52879461\n",
      "Iteration 221, loss = 0.51591562\n",
      "Iteration 222, loss = 0.52016453\n",
      "Iteration 223, loss = 0.51662783\n",
      "Iteration 224, loss = 0.51585491\n",
      "Iteration 225, loss = 0.51066551\n",
      "Iteration 226, loss = 0.51390090\n",
      "Iteration 227, loss = 0.53802245\n",
      "Iteration 228, loss = 0.52596863\n",
      "Iteration 229, loss = 0.50955517\n",
      "Iteration 230, loss = 0.50636180\n",
      "Iteration 231, loss = 0.51280717\n",
      "Iteration 232, loss = 0.51110036\n",
      "Iteration 233, loss = 0.52378130\n",
      "Iteration 234, loss = 0.50754453\n",
      "Iteration 235, loss = 0.51110974\n",
      "Iteration 236, loss = 0.49930703\n",
      "Iteration 237, loss = 0.50432661\n",
      "Iteration 238, loss = 0.50057297\n",
      "Iteration 239, loss = 0.49859244\n",
      "Iteration 240, loss = 0.49931068\n",
      "Iteration 241, loss = 0.49526806\n",
      "Iteration 242, loss = 0.49083485\n",
      "Iteration 243, loss = 0.49696059\n",
      "Iteration 244, loss = 0.49789818\n",
      "Iteration 245, loss = 0.49398704\n",
      "Iteration 246, loss = 0.50341477\n",
      "Iteration 247, loss = 0.48815232\n",
      "Iteration 248, loss = 0.49606867\n",
      "Iteration 249, loss = 0.50272774\n",
      "Iteration 250, loss = 0.49656867\n",
      "Iteration 251, loss = 0.49811544\n",
      "Iteration 252, loss = 0.48869526\n",
      "Iteration 253, loss = 0.49082209\n",
      "Iteration 254, loss = 0.48631919\n",
      "Iteration 255, loss = 0.48656532\n",
      "Iteration 256, loss = 0.48478819\n",
      "Iteration 257, loss = 0.48670193\n",
      "Iteration 258, loss = 0.47758941\n",
      "Iteration 259, loss = 0.48776128\n",
      "Iteration 260, loss = 0.48855026\n",
      "Iteration 261, loss = 0.48954134\n",
      "Iteration 262, loss = 0.48474687\n",
      "Iteration 263, loss = 0.48751582\n",
      "Iteration 264, loss = 0.49685578\n",
      "Iteration 265, loss = 0.48103199\n",
      "Iteration 266, loss = 0.47471907\n",
      "Iteration 267, loss = 0.47866642\n",
      "Iteration 268, loss = 0.48373305\n",
      "Iteration 269, loss = 0.48415769\n",
      "Iteration 270, loss = 0.48689450\n",
      "Iteration 271, loss = 0.47426651\n",
      "Iteration 272, loss = 0.47541959\n",
      "Iteration 273, loss = 0.47803154\n",
      "Iteration 274, loss = 0.48202450\n",
      "Iteration 275, loss = 0.47691813\n",
      "Iteration 276, loss = 0.47542560\n",
      "Iteration 277, loss = 0.47570009\n",
      "Iteration 278, loss = 0.47530452\n",
      "Iteration 279, loss = 0.48005180\n",
      "Iteration 280, loss = 0.47018445\n",
      "Iteration 281, loss = 0.46589047\n",
      "Iteration 282, loss = 0.47372249\n",
      "Iteration 283, loss = 0.47365858\n",
      "Iteration 284, loss = 0.47130557\n",
      "Iteration 285, loss = 0.46639626\n",
      "Iteration 286, loss = 0.46747409\n",
      "Iteration 287, loss = 0.46842916\n",
      "Iteration 288, loss = 0.46331631\n",
      "Iteration 289, loss = 0.47265163\n",
      "Iteration 290, loss = 0.47243976\n",
      "Iteration 291, loss = 0.46453575\n",
      "Iteration 292, loss = 0.46280968\n",
      "Iteration 293, loss = 0.46743965\n",
      "Iteration 294, loss = 0.46284248\n",
      "Iteration 295, loss = 0.45927647\n",
      "Iteration 296, loss = 0.46552254\n",
      "Iteration 297, loss = 0.46725005\n",
      "Iteration 298, loss = 0.46352160\n",
      "Iteration 299, loss = 0.46413101\n",
      "Iteration 300, loss = 0.46345915\n",
      "Iteration 301, loss = 0.46116754\n",
      "Iteration 302, loss = 0.46172228\n",
      "Iteration 303, loss = 0.45769604\n",
      "Iteration 304, loss = 0.46605419\n",
      "Iteration 305, loss = 0.46338115\n",
      "Iteration 306, loss = 0.46261867\n",
      "Iteration 307, loss = 0.46016795\n",
      "Iteration 308, loss = 0.45937750\n",
      "Iteration 309, loss = 0.45789113\n",
      "Iteration 310, loss = 0.45479071\n",
      "Iteration 311, loss = 0.46194662\n",
      "Iteration 312, loss = 0.45525831\n",
      "Iteration 313, loss = 0.46085363\n",
      "Iteration 314, loss = 0.45889008\n",
      "Iteration 315, loss = 0.46097104\n",
      "Iteration 316, loss = 0.45822494\n",
      "Iteration 317, loss = 0.45861346\n",
      "Iteration 318, loss = 0.45015177\n",
      "Iteration 319, loss = 0.46444364\n",
      "Iteration 320, loss = 0.45244381\n",
      "Iteration 321, loss = 0.45916312\n",
      "Iteration 322, loss = 0.45126102\n",
      "Iteration 323, loss = 0.44760358\n",
      "Iteration 324, loss = 0.45510964\n",
      "Iteration 325, loss = 0.44885142\n",
      "Iteration 326, loss = 0.44695638\n",
      "Iteration 327, loss = 0.45790100\n",
      "Iteration 328, loss = 0.44980783\n",
      "Iteration 329, loss = 0.45643981\n",
      "Iteration 330, loss = 0.44644964\n",
      "Iteration 331, loss = 0.45552930\n",
      "Iteration 332, loss = 0.45098408\n",
      "Iteration 333, loss = 0.45206053\n",
      "Iteration 334, loss = 0.44671236\n",
      "Iteration 335, loss = 0.44605162\n",
      "Iteration 336, loss = 0.45522849\n",
      "Iteration 337, loss = 0.44390233\n",
      "Iteration 338, loss = 0.44564544\n",
      "Iteration 339, loss = 0.44533728\n",
      "Iteration 340, loss = 0.44369210\n",
      "Iteration 341, loss = 0.44202967\n",
      "Iteration 342, loss = 0.43858185\n",
      "Iteration 343, loss = 0.44849796\n",
      "Iteration 344, loss = 0.43693376\n",
      "Iteration 345, loss = 0.43750861\n",
      "Iteration 346, loss = 0.43738356\n",
      "Iteration 347, loss = 0.43845743\n",
      "Iteration 348, loss = 0.44338093\n",
      "Iteration 349, loss = 0.44416099\n",
      "Iteration 350, loss = 0.44193232\n",
      "Iteration 351, loss = 0.43632602\n",
      "Iteration 352, loss = 0.43721434\n",
      "Iteration 353, loss = 0.46132749\n",
      "Iteration 354, loss = 0.44799524\n",
      "Iteration 355, loss = 0.43805487\n",
      "Iteration 356, loss = 0.44135465\n",
      "Iteration 357, loss = 0.43291008\n",
      "Iteration 358, loss = 0.43707766\n",
      "Iteration 359, loss = 0.43309886\n",
      "Iteration 360, loss = 0.43766470\n",
      "Iteration 361, loss = 0.42940263\n",
      "Iteration 362, loss = 0.43392776\n",
      "Iteration 363, loss = 0.42945146\n",
      "Iteration 364, loss = 0.43509361\n",
      "Iteration 365, loss = 0.42801568\n",
      "Iteration 366, loss = 0.44040793\n",
      "Iteration 367, loss = 0.43376127\n",
      "Iteration 368, loss = 0.44025351\n",
      "Iteration 369, loss = 0.43746931\n",
      "Iteration 370, loss = 0.43313260\n",
      "Iteration 371, loss = 0.42583713\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iteration 372, loss = 0.43127929\n",
      "Iteration 373, loss = 0.42713664\n",
      "Iteration 374, loss = 0.42694828\n",
      "Iteration 375, loss = 0.43392482\n",
      "Iteration 376, loss = 0.42251292\n",
      "Iteration 377, loss = 0.43146928\n",
      "Iteration 378, loss = 0.42762436\n",
      "Iteration 379, loss = 0.42514752\n",
      "Iteration 380, loss = 0.42802145\n",
      "Iteration 381, loss = 0.42327802\n",
      "Iteration 382, loss = 0.43214781\n",
      "Iteration 383, loss = 0.42835974\n",
      "Iteration 384, loss = 0.43061658\n",
      "Iteration 385, loss = 0.42945736\n",
      "Iteration 386, loss = 0.43561503\n",
      "Iteration 387, loss = 0.43089200\n",
      "Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
      "2.649458691557523\n",
      "Iteration 1, loss = 17.46186367\n",
      "Iteration 2, loss = 4.90497036\n",
      "Iteration 3, loss = 4.07570239\n",
      "Iteration 4, loss = 3.55275440\n",
      "Iteration 5, loss = 3.23604818\n",
      "Iteration 6, loss = 3.02897135\n",
      "Iteration 7, loss = 2.88587580\n",
      "Iteration 8, loss = 2.75789185\n",
      "Iteration 9, loss = 2.63634082\n",
      "Iteration 10, loss = 2.52790381\n",
      "Iteration 11, loss = 2.42107211\n",
      "Iteration 12, loss = 2.31225095\n",
      "Iteration 13, loss = 2.20931468\n",
      "Iteration 14, loss = 2.09154223\n",
      "Iteration 15, loss = 1.98494898\n",
      "Iteration 16, loss = 1.90243713\n",
      "Iteration 17, loss = 1.80700283\n",
      "Iteration 18, loss = 1.73008334\n",
      "Iteration 19, loss = 1.66504729\n",
      "Iteration 20, loss = 1.60731225\n",
      "Iteration 21, loss = 1.55694043\n",
      "Iteration 22, loss = 1.50392754\n",
      "Iteration 23, loss = 1.46208912\n",
      "Iteration 24, loss = 1.41989647\n",
      "Iteration 25, loss = 1.38418369\n",
      "Iteration 26, loss = 1.35794192\n",
      "Iteration 27, loss = 1.32652660\n",
      "Iteration 28, loss = 1.30389713\n",
      "Iteration 29, loss = 1.26986344\n",
      "Iteration 30, loss = 1.24289461\n",
      "Iteration 31, loss = 1.22217556\n",
      "Iteration 32, loss = 1.20005633\n",
      "Iteration 33, loss = 1.18902129\n",
      "Iteration 34, loss = 1.16192587\n",
      "Iteration 35, loss = 1.14355327\n",
      "Iteration 36, loss = 1.12695061\n",
      "Iteration 37, loss = 1.11266185\n",
      "Iteration 38, loss = 1.09817383\n",
      "Iteration 39, loss = 1.08896712\n",
      "Iteration 40, loss = 1.07293392\n",
      "Iteration 41, loss = 1.05509935\n",
      "Iteration 42, loss = 1.04558309\n",
      "Iteration 43, loss = 1.02853853\n",
      "Iteration 44, loss = 1.02539590\n",
      "Iteration 45, loss = 1.01554811\n",
      "Iteration 46, loss = 1.01170877\n",
      "Iteration 47, loss = 0.98693784\n",
      "Iteration 48, loss = 0.99703523\n",
      "Iteration 49, loss = 0.97811287\n",
      "Iteration 50, loss = 0.97484587\n",
      "Iteration 51, loss = 0.95865134\n",
      "Iteration 52, loss = 0.95564578\n",
      "Iteration 53, loss = 0.94501482\n",
      "Iteration 54, loss = 0.93581125\n",
      "Iteration 55, loss = 0.92510005\n",
      "Iteration 56, loss = 0.92293527\n",
      "Iteration 57, loss = 0.91236754\n",
      "Iteration 58, loss = 0.90379876\n",
      "Iteration 59, loss = 0.89956841\n",
      "Iteration 60, loss = 0.89654066\n",
      "Iteration 61, loss = 0.88328155\n",
      "Iteration 62, loss = 0.87973613\n",
      "Iteration 63, loss = 0.87377365\n",
      "Iteration 64, loss = 0.87604496\n",
      "Iteration 65, loss = 0.86860694\n",
      "Iteration 66, loss = 0.86607612\n",
      "Iteration 67, loss = 0.85831904\n",
      "Iteration 68, loss = 0.84399208\n",
      "Iteration 69, loss = 0.84452653\n",
      "Iteration 70, loss = 0.83768300\n",
      "Iteration 71, loss = 0.83492960\n",
      "Iteration 72, loss = 0.83038914\n",
      "Iteration 73, loss = 0.82267500\n",
      "Iteration 74, loss = 0.82781203\n",
      "Iteration 75, loss = 0.81238309\n",
      "Iteration 76, loss = 0.80882297\n",
      "Iteration 77, loss = 0.80426203\n",
      "Iteration 78, loss = 0.79982202\n",
      "Iteration 79, loss = 0.80729752\n",
      "Iteration 80, loss = 0.79935360\n",
      "Iteration 81, loss = 0.79151870\n",
      "Iteration 82, loss = 0.79242299\n",
      "Iteration 83, loss = 0.77846237\n",
      "Iteration 84, loss = 0.78139410\n",
      "Iteration 85, loss = 0.77566986\n",
      "Iteration 86, loss = 0.77813170\n",
      "Iteration 87, loss = 0.77952123\n",
      "Iteration 88, loss = 0.76910694\n",
      "Iteration 89, loss = 0.76081139\n",
      "Iteration 90, loss = 0.75964348\n",
      "Iteration 91, loss = 0.75833822\n",
      "Iteration 92, loss = 0.75861361\n",
      "Iteration 93, loss = 0.75311674\n",
      "Iteration 94, loss = 0.74231530\n",
      "Iteration 95, loss = 0.75106852\n",
      "Iteration 96, loss = 0.73923642\n",
      "Iteration 97, loss = 0.73098777\n",
      "Iteration 98, loss = 0.73715725\n",
      "Iteration 99, loss = 0.72908769\n",
      "Iteration 100, loss = 0.73623574\n",
      "Iteration 101, loss = 0.72235894\n",
      "Iteration 102, loss = 0.73303491\n",
      "Iteration 103, loss = 0.71916336\n",
      "Iteration 104, loss = 0.71225118\n",
      "Iteration 105, loss = 0.71757492\n",
      "Iteration 106, loss = 0.71463552\n",
      "Iteration 107, loss = 0.71575675\n",
      "Iteration 108, loss = 0.70398198\n",
      "Iteration 109, loss = 0.70264157\n",
      "Iteration 110, loss = 0.69891299\n",
      "Iteration 111, loss = 0.69335889\n",
      "Iteration 112, loss = 0.69346728\n",
      "Iteration 113, loss = 0.69792720\n",
      "Iteration 114, loss = 0.68900863\n",
      "Iteration 115, loss = 0.69214183\n",
      "Iteration 116, loss = 0.70622869\n",
      "Iteration 117, loss = 0.68437265\n",
      "Iteration 118, loss = 0.68032674\n",
      "Iteration 119, loss = 0.67555078\n",
      "Iteration 120, loss = 0.67620817\n",
      "Iteration 121, loss = 0.67491588\n",
      "Iteration 122, loss = 0.66970421\n",
      "Iteration 123, loss = 0.66354452\n",
      "Iteration 124, loss = 0.67050680\n",
      "Iteration 125, loss = 0.67393692\n",
      "Iteration 126, loss = 0.65692767\n",
      "Iteration 127, loss = 0.65676997\n",
      "Iteration 128, loss = 0.65916159\n",
      "Iteration 129, loss = 0.65189247\n",
      "Iteration 130, loss = 0.65427869\n",
      "Iteration 131, loss = 0.65126521\n",
      "Iteration 132, loss = 0.64885284\n",
      "Iteration 133, loss = 0.64465692\n",
      "Iteration 134, loss = 0.64341110\n",
      "Iteration 135, loss = 0.64489198\n",
      "Iteration 136, loss = 0.63873771\n",
      "Iteration 137, loss = 0.63796431\n",
      "Iteration 138, loss = 0.63156736\n",
      "Iteration 139, loss = 0.63043559\n",
      "Iteration 140, loss = 0.63273345\n",
      "Iteration 141, loss = 0.63284330\n",
      "Iteration 142, loss = 0.62649728\n",
      "Iteration 143, loss = 0.62392526\n",
      "Iteration 144, loss = 0.63786166\n",
      "Iteration 145, loss = 0.61660281\n",
      "Iteration 146, loss = 0.62545876\n",
      "Iteration 147, loss = 0.62123460\n",
      "Iteration 148, loss = 0.62147784\n",
      "Iteration 149, loss = 0.62532979\n",
      "Iteration 150, loss = 0.61121946\n",
      "Iteration 151, loss = 0.61436544\n",
      "Iteration 152, loss = 0.60523625\n",
      "Iteration 153, loss = 0.60522487\n",
      "Iteration 154, loss = 0.61250931\n",
      "Iteration 155, loss = 0.60780628\n",
      "Iteration 156, loss = 0.60325207\n",
      "Iteration 157, loss = 0.60132594\n",
      "Iteration 158, loss = 0.59258515\n",
      "Iteration 159, loss = 0.59085879\n",
      "Iteration 160, loss = 0.59514052\n",
      "Iteration 161, loss = 0.59046500\n",
      "Iteration 162, loss = 0.59400377\n",
      "Iteration 163, loss = 0.59966848\n",
      "Iteration 164, loss = 0.58857904\n",
      "Iteration 165, loss = 0.58859704\n",
      "Iteration 166, loss = 0.59622919\n",
      "Iteration 167, loss = 0.59028130\n",
      "Iteration 168, loss = 0.58385973\n",
      "Iteration 169, loss = 0.58916295\n",
      "Iteration 170, loss = 0.57953452\n",
      "Iteration 171, loss = 0.57558310\n",
      "Iteration 172, loss = 0.59379277\n",
      "Iteration 173, loss = 0.58060486\n",
      "Iteration 174, loss = 0.57402619\n",
      "Iteration 175, loss = 0.58167671\n",
      "Iteration 176, loss = 0.56904535\n",
      "Iteration 177, loss = 0.57839455\n",
      "Iteration 178, loss = 0.57189507\n",
      "Iteration 179, loss = 0.57002084\n",
      "Iteration 180, loss = 0.57052783\n",
      "Iteration 181, loss = 0.56657359\n",
      "Iteration 182, loss = 0.56598018\n",
      "Iteration 183, loss = 0.56981807\n",
      "Iteration 184, loss = 0.56411816\n",
      "Iteration 185, loss = 0.55972685\n",
      "Iteration 186, loss = 0.56991109\n",
      "Iteration 187, loss = 0.56100655\n",
      "Iteration 188, loss = 0.55955097\n",
      "Iteration 189, loss = 0.55332869\n",
      "Iteration 190, loss = 0.55206896\n",
      "Iteration 191, loss = 0.55438884\n",
      "Iteration 192, loss = 0.55194364\n",
      "Iteration 193, loss = 0.55396207\n",
      "Iteration 194, loss = 0.55299149\n",
      "Iteration 195, loss = 0.54587379\n",
      "Iteration 196, loss = 0.55741557\n",
      "Iteration 197, loss = 0.55080489\n",
      "Iteration 198, loss = 0.55873857\n",
      "Iteration 199, loss = 0.55261721\n",
      "Iteration 200, loss = 0.54845954\n",
      "Iteration 201, loss = 0.54976176\n",
      "Iteration 202, loss = 0.54107638\n",
      "Iteration 203, loss = 0.54204103\n",
      "Iteration 204, loss = 0.54029148\n",
      "Iteration 205, loss = 0.54072667\n",
      "Iteration 206, loss = 0.54324514\n",
      "Iteration 207, loss = 0.53960803\n",
      "Iteration 208, loss = 0.53305665\n",
      "Iteration 209, loss = 0.53250655\n",
      "Iteration 210, loss = 0.55013337\n",
      "Iteration 211, loss = 0.53633576\n",
      "Iteration 212, loss = 0.53446004\n",
      "Iteration 213, loss = 0.52433421\n",
      "Iteration 214, loss = 0.52710994\n",
      "Iteration 215, loss = 0.52943302\n",
      "Iteration 216, loss = 0.52763216\n",
      "Iteration 217, loss = 0.52449736\n",
      "Iteration 218, loss = 0.52724568\n",
      "Iteration 219, loss = 0.52698039\n",
      "Iteration 220, loss = 0.56014177\n",
      "Iteration 221, loss = 0.51872579\n",
      "Iteration 222, loss = 0.53520652\n",
      "Iteration 223, loss = 0.52107063\n",
      "Iteration 224, loss = 0.52189472\n",
      "Iteration 225, loss = 0.51995086\n",
      "Iteration 226, loss = 0.53093914\n",
      "Iteration 227, loss = 0.51500936\n",
      "Iteration 228, loss = 0.52464895\n",
      "Iteration 229, loss = 0.51377466\n",
      "Iteration 230, loss = 0.51278804\n",
      "Iteration 231, loss = 0.51892764\n",
      "Iteration 232, loss = 0.51622864\n",
      "Iteration 233, loss = 0.51585986\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iteration 234, loss = 0.50866916\n",
      "Iteration 235, loss = 0.51541188\n",
      "Iteration 236, loss = 0.50883258\n",
      "Iteration 237, loss = 0.51480831\n",
      "Iteration 238, loss = 0.51337005\n",
      "Iteration 239, loss = 0.51185615\n",
      "Iteration 240, loss = 0.51030867\n",
      "Iteration 241, loss = 0.51228492\n",
      "Iteration 242, loss = 0.50889577\n",
      "Iteration 243, loss = 0.52485366\n",
      "Iteration 244, loss = 0.50515383\n",
      "Iteration 245, loss = 0.50009148\n",
      "Iteration 246, loss = 0.50293083\n",
      "Iteration 247, loss = 0.49665695\n",
      "Iteration 248, loss = 0.50750204\n",
      "Iteration 249, loss = 0.51436594\n",
      "Iteration 250, loss = 0.49765148\n",
      "Iteration 251, loss = 0.50920400\n",
      "Iteration 252, loss = 0.49908464\n",
      "Iteration 253, loss = 0.49668301\n",
      "Iteration 254, loss = 0.50293161\n",
      "Iteration 255, loss = 0.49275173\n",
      "Iteration 256, loss = 0.49427503\n",
      "Iteration 257, loss = 0.49530526\n",
      "Iteration 258, loss = 0.48729035\n",
      "Iteration 259, loss = 0.49737549\n",
      "Iteration 260, loss = 0.50050329\n",
      "Iteration 261, loss = 0.49382480\n",
      "Iteration 262, loss = 0.49368215\n",
      "Iteration 263, loss = 0.49953323\n",
      "Iteration 264, loss = 0.49467293\n",
      "Iteration 265, loss = 0.49053073\n",
      "Iteration 266, loss = 0.48136396\n",
      "Iteration 267, loss = 0.48703744\n",
      "Iteration 268, loss = 0.49551636\n",
      "Iteration 269, loss = 0.49656368\n",
      "Iteration 270, loss = 0.49272950\n",
      "Iteration 271, loss = 0.48206344\n",
      "Iteration 272, loss = 0.48873396\n",
      "Iteration 273, loss = 0.48830699\n",
      "Iteration 274, loss = 0.48648821\n",
      "Iteration 275, loss = 0.48040962\n",
      "Iteration 276, loss = 0.48046153\n",
      "Iteration 277, loss = 0.48001301\n",
      "Iteration 278, loss = 0.47492883\n",
      "Iteration 279, loss = 0.49212857\n",
      "Iteration 280, loss = 0.48134139\n",
      "Iteration 281, loss = 0.47406779\n",
      "Iteration 282, loss = 0.48333114\n",
      "Iteration 283, loss = 0.48404475\n",
      "Iteration 284, loss = 0.49656380\n",
      "Iteration 285, loss = 0.47882602\n",
      "Iteration 286, loss = 0.47911414\n",
      "Iteration 287, loss = 0.47121611\n",
      "Iteration 288, loss = 0.47640140\n",
      "Iteration 289, loss = 0.47279100\n",
      "Iteration 290, loss = 0.47617271\n",
      "Iteration 291, loss = 0.47014459\n",
      "Iteration 292, loss = 0.47305774\n",
      "Iteration 293, loss = 0.47840182\n",
      "Iteration 294, loss = 0.46899736\n",
      "Iteration 295, loss = 0.46773300\n",
      "Iteration 296, loss = 0.46850828\n",
      "Iteration 297, loss = 0.47315929\n",
      "Iteration 298, loss = 0.47281552\n",
      "Iteration 299, loss = 0.46984333\n",
      "Iteration 300, loss = 0.47197725\n",
      "Iteration 301, loss = 0.46399802\n",
      "Iteration 302, loss = 0.46532735\n",
      "Iteration 303, loss = 0.46970516\n",
      "Iteration 304, loss = 0.46917854\n",
      "Iteration 305, loss = 0.47084724\n",
      "Iteration 306, loss = 0.48260519\n",
      "Iteration 307, loss = 0.47774129\n",
      "Iteration 308, loss = 0.46341756\n",
      "Iteration 309, loss = 0.46587907\n",
      "Iteration 310, loss = 0.46043582\n",
      "Iteration 311, loss = 0.46809623\n",
      "Iteration 312, loss = 0.46219263\n",
      "Iteration 313, loss = 0.46326742\n",
      "Iteration 314, loss = 0.46090693\n",
      "Iteration 315, loss = 0.46010162\n",
      "Iteration 316, loss = 0.45921977\n",
      "Iteration 317, loss = 0.46099436\n",
      "Iteration 318, loss = 0.45318546\n",
      "Iteration 319, loss = 0.46834841\n",
      "Iteration 320, loss = 0.45242189\n",
      "Iteration 321, loss = 0.46492063\n",
      "Iteration 322, loss = 0.45537374\n",
      "Iteration 323, loss = 0.45494030\n",
      "Iteration 324, loss = 0.45730644\n",
      "Iteration 325, loss = 0.45197647\n",
      "Iteration 326, loss = 0.47220352\n",
      "Iteration 327, loss = 0.46140778\n",
      "Iteration 328, loss = 0.46054585\n",
      "Iteration 329, loss = 0.54111037\n",
      "Iteration 330, loss = 0.45780190\n",
      "Iteration 331, loss = 0.46170421\n",
      "Iteration 332, loss = 0.45753127\n",
      "Iteration 333, loss = 0.45988555\n",
      "Iteration 334, loss = 0.45372573\n",
      "Iteration 335, loss = 0.45632465\n",
      "Iteration 336, loss = 0.45540943\n",
      "Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
      "2.7210662030575676\n",
      "Iteration 1, loss = 17.47247825\n",
      "Iteration 2, loss = 4.88190483\n",
      "Iteration 3, loss = 4.06667002\n",
      "Iteration 4, loss = 3.54658692\n",
      "Iteration 5, loss = 3.23732633\n",
      "Iteration 6, loss = 3.02994345\n",
      "Iteration 7, loss = 2.87822835\n",
      "Iteration 8, loss = 2.74800260\n",
      "Iteration 9, loss = 2.63301347\n",
      "Iteration 10, loss = 2.51922259\n",
      "Iteration 11, loss = 2.40995053\n",
      "Iteration 12, loss = 2.29885007\n",
      "Iteration 13, loss = 2.19855337\n",
      "Iteration 14, loss = 2.08214565\n",
      "Iteration 15, loss = 1.98867214\n",
      "Iteration 16, loss = 1.89405228\n",
      "Iteration 17, loss = 1.80682884\n",
      "Iteration 18, loss = 1.73691286\n",
      "Iteration 19, loss = 1.67411778\n",
      "Iteration 20, loss = 1.61711919\n",
      "Iteration 21, loss = 1.56837570\n",
      "Iteration 22, loss = 1.52205073\n",
      "Iteration 23, loss = 1.48174866\n",
      "Iteration 24, loss = 1.44207339\n",
      "Iteration 25, loss = 1.40755327\n",
      "Iteration 26, loss = 1.38086514\n",
      "Iteration 27, loss = 1.34923968\n",
      "Iteration 28, loss = 1.32550010\n",
      "Iteration 29, loss = 1.29907759\n",
      "Iteration 30, loss = 1.26942527\n",
      "Iteration 31, loss = 1.25190903\n",
      "Iteration 32, loss = 1.22906659\n",
      "Iteration 33, loss = 1.21195185\n",
      "Iteration 34, loss = 1.18875979\n",
      "Iteration 35, loss = 1.16506620\n",
      "Iteration 36, loss = 1.14830223\n",
      "Iteration 37, loss = 1.13603422\n",
      "Iteration 38, loss = 1.12028121\n",
      "Iteration 39, loss = 1.10464626\n",
      "Iteration 40, loss = 1.09601082\n",
      "Iteration 41, loss = 1.07833147\n",
      "Iteration 42, loss = 1.08594043\n",
      "Iteration 43, loss = 1.04937329\n",
      "Iteration 44, loss = 1.04419958\n",
      "Iteration 45, loss = 1.02924127\n",
      "Iteration 46, loss = 1.02917103\n",
      "Iteration 47, loss = 1.00244665\n",
      "Iteration 48, loss = 1.01508594\n",
      "Iteration 49, loss = 0.98870317\n",
      "Iteration 50, loss = 0.99202973\n",
      "Iteration 51, loss = 0.97128943\n",
      "Iteration 52, loss = 0.96673150\n",
      "Iteration 53, loss = 0.95790469\n",
      "Iteration 54, loss = 0.94590123\n",
      "Iteration 55, loss = 0.93760007\n",
      "Iteration 56, loss = 0.93394385\n",
      "Iteration 57, loss = 0.92766206\n",
      "Iteration 58, loss = 0.91845283\n",
      "Iteration 59, loss = 0.90734586\n",
      "Iteration 60, loss = 0.90584776\n",
      "Iteration 61, loss = 0.89396109\n",
      "Iteration 62, loss = 0.88559161\n",
      "Iteration 63, loss = 0.88366641\n",
      "Iteration 64, loss = 0.88289097\n",
      "Iteration 65, loss = 0.87926267\n",
      "Iteration 66, loss = 0.87239070\n",
      "Iteration 67, loss = 0.87081251\n",
      "Iteration 68, loss = 0.85656225\n",
      "Iteration 69, loss = 0.85165492\n",
      "Iteration 70, loss = 0.85102321\n",
      "Iteration 71, loss = 0.83812700\n",
      "Iteration 72, loss = 0.84014149\n",
      "Iteration 73, loss = 0.83450142\n",
      "Iteration 74, loss = 0.83161383\n",
      "Iteration 75, loss = 0.82417216\n",
      "Iteration 76, loss = 0.81486255\n",
      "Iteration 77, loss = 0.81123345\n",
      "Iteration 78, loss = 0.80271905\n",
      "Iteration 79, loss = 0.80552187\n",
      "Iteration 80, loss = 0.80196539\n",
      "Iteration 81, loss = 0.79309105\n",
      "Iteration 82, loss = 0.79628499\n",
      "Iteration 83, loss = 0.79393136\n",
      "Iteration 84, loss = 0.78143925\n",
      "Iteration 85, loss = 0.77348998\n",
      "Iteration 86, loss = 0.77413840\n",
      "Iteration 87, loss = 0.77492726\n",
      "Iteration 88, loss = 0.77358219\n",
      "Iteration 89, loss = 0.76108083\n",
      "Iteration 90, loss = 0.76091189\n",
      "Iteration 91, loss = 0.75745535\n",
      "Iteration 92, loss = 0.75079248\n",
      "Iteration 93, loss = 0.75081089\n",
      "Iteration 94, loss = 0.74556614\n",
      "Iteration 95, loss = 0.74794617\n",
      "Iteration 96, loss = 0.73874490\n",
      "Iteration 97, loss = 0.73294595\n",
      "Iteration 98, loss = 0.73274684\n",
      "Iteration 99, loss = 0.72654292\n",
      "Iteration 100, loss = 0.72758790\n",
      "Iteration 101, loss = 0.72014202\n",
      "Iteration 102, loss = 0.72648334\n",
      "Iteration 103, loss = 0.71523958\n",
      "Iteration 104, loss = 0.71303594\n",
      "Iteration 105, loss = 0.71316505\n",
      "Iteration 106, loss = 0.70919260\n",
      "Iteration 107, loss = 0.70829065\n",
      "Iteration 108, loss = 0.69970935\n",
      "Iteration 109, loss = 0.69791782\n",
      "Iteration 110, loss = 0.69942886\n",
      "Iteration 111, loss = 0.68980047\n",
      "Iteration 112, loss = 0.69131814\n",
      "Iteration 113, loss = 0.69207705\n",
      "Iteration 114, loss = 0.68923991\n",
      "Iteration 115, loss = 0.69240252\n",
      "Iteration 116, loss = 0.69431282\n",
      "Iteration 117, loss = 0.67987136\n",
      "Iteration 118, loss = 0.68287478\n",
      "Iteration 119, loss = 0.66831162\n",
      "Iteration 120, loss = 0.67658719\n",
      "Iteration 121, loss = 0.67350050\n",
      "Iteration 122, loss = 0.66977129\n",
      "Iteration 123, loss = 0.66486574\n",
      "Iteration 124, loss = 0.66706102\n",
      "Iteration 125, loss = 0.66801714\n",
      "Iteration 126, loss = 0.66004558\n",
      "Iteration 127, loss = 0.65507777\n",
      "Iteration 128, loss = 0.65512473\n",
      "Iteration 129, loss = 0.65016349\n",
      "Iteration 130, loss = 0.66725263\n",
      "Iteration 131, loss = 0.64620662\n",
      "Iteration 132, loss = 0.64581124\n",
      "Iteration 133, loss = 0.64635936\n",
      "Iteration 134, loss = 0.64792925\n",
      "Iteration 135, loss = 0.65222569\n",
      "Iteration 136, loss = 0.63896965\n",
      "Iteration 137, loss = 0.63762942\n",
      "Iteration 138, loss = 0.63535976\n",
      "Iteration 139, loss = 0.63449373\n",
      "Iteration 140, loss = 0.63504819\n",
      "Iteration 141, loss = 0.63598689\n",
      "Iteration 142, loss = 0.62356568\n",
      "Iteration 143, loss = 0.62951225\n",
      "Iteration 144, loss = 0.62673672\n",
      "Iteration 145, loss = 0.61524067\n",
      "Iteration 146, loss = 0.62262192\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iteration 147, loss = 0.62703985\n",
      "Iteration 148, loss = 0.62719376\n",
      "Iteration 149, loss = 0.62993158\n",
      "Iteration 150, loss = 0.60964076\n",
      "Iteration 151, loss = 0.61941773\n",
      "Iteration 152, loss = 0.61197980\n",
      "Iteration 153, loss = 0.61074425\n",
      "Iteration 154, loss = 0.61222424\n",
      "Iteration 155, loss = 0.61153537\n",
      "Iteration 156, loss = 0.60413755\n",
      "Iteration 157, loss = 0.60876415\n",
      "Iteration 158, loss = 0.60078729\n",
      "Iteration 159, loss = 0.59865268\n",
      "Iteration 160, loss = 0.59811850\n",
      "Iteration 161, loss = 0.59317876\n",
      "Iteration 162, loss = 0.59767261\n",
      "Iteration 163, loss = 0.60996857\n",
      "Iteration 164, loss = 0.59489634\n",
      "Iteration 165, loss = 0.58765044\n",
      "Iteration 166, loss = 0.59433176\n",
      "Iteration 167, loss = 0.59100256\n",
      "Iteration 168, loss = 0.58682927\n",
      "Iteration 169, loss = 0.59349862\n",
      "Iteration 170, loss = 0.58067277\n",
      "Iteration 171, loss = 0.57933176\n",
      "Iteration 172, loss = 0.58205107\n",
      "Iteration 173, loss = 0.58376106\n",
      "Iteration 174, loss = 0.57453437\n",
      "Iteration 175, loss = 0.57712388\n",
      "Iteration 176, loss = 0.58089476\n",
      "Iteration 177, loss = 0.58773839\n",
      "Iteration 178, loss = 0.57121535\n",
      "Iteration 179, loss = 0.56849932\n",
      "Iteration 180, loss = 0.57925181\n",
      "Iteration 181, loss = 0.56857152\n",
      "Iteration 182, loss = 0.57417838\n",
      "Iteration 183, loss = 0.56652387\n",
      "Iteration 184, loss = 0.56290751\n",
      "Iteration 185, loss = 0.56899128\n",
      "Iteration 186, loss = 0.57548288\n",
      "Iteration 187, loss = 0.56381826\n",
      "Iteration 188, loss = 0.56586003\n",
      "Iteration 189, loss = 0.55665872\n",
      "Iteration 190, loss = 0.55486627\n",
      "Iteration 191, loss = 0.55524790\n",
      "Iteration 192, loss = 0.55634810\n",
      "Iteration 193, loss = 0.55673557\n",
      "Iteration 194, loss = 0.55706898\n",
      "Iteration 195, loss = 0.55060144\n",
      "Iteration 196, loss = 0.56554159\n",
      "Iteration 197, loss = 0.55770536\n",
      "Iteration 198, loss = 0.56133053\n",
      "Iteration 199, loss = 0.55182408\n",
      "Iteration 200, loss = 0.55188114\n",
      "Iteration 201, loss = 0.55598031\n",
      "Iteration 202, loss = 0.54085504\n",
      "Iteration 203, loss = 0.54557355\n",
      "Iteration 204, loss = 0.53947464\n",
      "Iteration 205, loss = 0.53848131\n",
      "Iteration 206, loss = 0.54942491\n",
      "Iteration 207, loss = 0.53943785\n",
      "Iteration 208, loss = 0.54060351\n",
      "Iteration 209, loss = 0.55902373\n",
      "Iteration 210, loss = 0.55004710\n",
      "Iteration 211, loss = 0.53715593\n",
      "Iteration 212, loss = 0.53813837\n",
      "Iteration 213, loss = 0.52897368\n",
      "Iteration 214, loss = 0.53167663\n",
      "Iteration 215, loss = 0.52933953\n",
      "Iteration 216, loss = 0.53059002\n",
      "Iteration 217, loss = 0.52388600\n",
      "Iteration 218, loss = 0.52605202\n",
      "Iteration 219, loss = 0.53190135\n",
      "Iteration 220, loss = 0.54545001\n",
      "Iteration 221, loss = 0.51940331\n",
      "Iteration 222, loss = 0.53026696\n",
      "Iteration 223, loss = 0.52561773\n",
      "Iteration 224, loss = 0.52261627\n",
      "Iteration 225, loss = 0.52015969\n",
      "Iteration 226, loss = 0.53693647\n",
      "Iteration 227, loss = 0.52083956\n",
      "Iteration 228, loss = 0.52004612\n",
      "Iteration 229, loss = 0.51352601\n",
      "Iteration 230, loss = 0.51621769\n",
      "Iteration 231, loss = 0.51799429\n",
      "Iteration 232, loss = 0.51659238\n",
      "Iteration 233, loss = 0.51676833\n",
      "Iteration 234, loss = 0.51416805\n",
      "Iteration 235, loss = 0.51312760\n",
      "Iteration 236, loss = 0.50967618\n",
      "Iteration 237, loss = 0.50904720\n",
      "Iteration 238, loss = 0.51081291\n",
      "Iteration 239, loss = 0.51135077\n",
      "Iteration 240, loss = 0.50706983\n",
      "Iteration 241, loss = 0.51006226\n",
      "Iteration 242, loss = 0.50134362\n",
      "Iteration 243, loss = 0.50768649\n",
      "Iteration 244, loss = 0.50457401\n",
      "Iteration 245, loss = 0.49621179\n",
      "Iteration 246, loss = 0.50313766\n",
      "Iteration 247, loss = 0.49727388\n",
      "Iteration 248, loss = 0.50178956\n",
      "Iteration 249, loss = 0.50589673\n",
      "Iteration 250, loss = 0.49900319\n",
      "Iteration 251, loss = 0.50387509\n",
      "Iteration 252, loss = 0.50164676\n",
      "Iteration 253, loss = 0.49879481\n",
      "Iteration 254, loss = 0.50194654\n",
      "Iteration 255, loss = 0.49543853\n",
      "Iteration 256, loss = 0.49697507\n",
      "Iteration 257, loss = 0.49835631\n",
      "Iteration 258, loss = 0.48588193\n",
      "Iteration 259, loss = 0.49456483\n",
      "Iteration 260, loss = 0.50043812\n",
      "Iteration 261, loss = 0.49259204\n",
      "Iteration 262, loss = 0.48617044\n",
      "Iteration 263, loss = 0.49108240\n",
      "Iteration 264, loss = 0.49162258\n",
      "Iteration 265, loss = 0.48670930\n",
      "Iteration 266, loss = 0.48446710\n",
      "Iteration 267, loss = 0.48857571\n",
      "Iteration 268, loss = 0.48352227\n",
      "Iteration 269, loss = 0.49178944\n",
      "Iteration 270, loss = 0.48720562\n",
      "Iteration 271, loss = 0.48665021\n",
      "Iteration 272, loss = 0.48294427\n",
      "Iteration 273, loss = 0.48652156\n",
      "Iteration 274, loss = 0.49623335\n",
      "Iteration 275, loss = 0.48342949\n",
      "Iteration 276, loss = 0.48539369\n",
      "Iteration 277, loss = 0.48494799\n",
      "Iteration 278, loss = 0.47664678\n",
      "Iteration 279, loss = 0.48316694\n",
      "Iteration 280, loss = 0.47687666\n",
      "Iteration 281, loss = 0.47124630\n",
      "Iteration 282, loss = 0.48255885\n",
      "Iteration 283, loss = 0.48096535\n",
      "Iteration 284, loss = 0.48518930\n",
      "Iteration 285, loss = 0.47642846\n",
      "Iteration 286, loss = 0.47562029\n",
      "Iteration 287, loss = 0.47738158\n",
      "Iteration 288, loss = 0.46808416\n",
      "Iteration 289, loss = 0.47319105\n",
      "Iteration 290, loss = 0.47122568\n",
      "Iteration 291, loss = 0.46744627\n",
      "Iteration 292, loss = 0.47355141\n",
      "Iteration 293, loss = 0.46694895\n",
      "Iteration 294, loss = 0.46912065\n",
      "Iteration 295, loss = 0.46675786\n",
      "Iteration 296, loss = 0.46393183\n",
      "Iteration 297, loss = 0.46798504\n",
      "Iteration 298, loss = 0.46549345\n",
      "Iteration 299, loss = 0.46527015\n",
      "Iteration 300, loss = 0.46911847\n",
      "Iteration 301, loss = 0.46356742\n",
      "Iteration 302, loss = 0.45919232\n",
      "Iteration 303, loss = 0.45885640\n",
      "Iteration 304, loss = 0.46946037\n",
      "Iteration 305, loss = 0.47458707\n",
      "Iteration 306, loss = 0.46563105\n",
      "Iteration 307, loss = 0.46499750\n",
      "Iteration 308, loss = 0.46263870\n",
      "Iteration 309, loss = 0.46661705\n",
      "Iteration 310, loss = 0.46295797\n",
      "Iteration 311, loss = 0.45789045\n",
      "Iteration 312, loss = 0.45853613\n",
      "Iteration 313, loss = 0.46521788\n",
      "Iteration 314, loss = 0.46116448\n",
      "Iteration 315, loss = 0.45699392\n",
      "Iteration 316, loss = 0.45208786\n",
      "Iteration 317, loss = 0.45628003\n",
      "Iteration 318, loss = 0.45199046\n",
      "Iteration 319, loss = 0.46484716\n",
      "Iteration 320, loss = 0.45259512\n",
      "Iteration 321, loss = 0.46429160\n",
      "Iteration 322, loss = 0.45443856\n",
      "Iteration 323, loss = 0.45132851\n",
      "Iteration 324, loss = 0.45349603\n",
      "Iteration 325, loss = 0.44602465\n",
      "Iteration 326, loss = 0.45431926\n",
      "Iteration 327, loss = 0.45735481\n",
      "Iteration 328, loss = 0.45424715\n",
      "Iteration 329, loss = 0.45212629\n",
      "Iteration 330, loss = 0.44624477\n",
      "Iteration 331, loss = 0.45176910\n",
      "Iteration 332, loss = 0.44638939\n",
      "Iteration 333, loss = 0.44645369\n",
      "Iteration 334, loss = 0.45155353\n",
      "Iteration 335, loss = 0.44890279\n",
      "Iteration 336, loss = 0.44678515\n",
      "Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
      "2.425684034321125\n",
      "Iteration 1, loss = 17.39361761\n",
      "Iteration 2, loss = 4.94833634\n",
      "Iteration 3, loss = 4.08637620\n",
      "Iteration 4, loss = 3.55406156\n",
      "Iteration 5, loss = 3.24810612\n",
      "Iteration 6, loss = 3.03746322\n",
      "Iteration 7, loss = 2.88327758\n",
      "Iteration 8, loss = 2.74879825\n",
      "Iteration 9, loss = 2.62982888\n",
      "Iteration 10, loss = 2.51311821\n",
      "Iteration 11, loss = 2.39748764\n",
      "Iteration 12, loss = 2.28070711\n",
      "Iteration 13, loss = 2.18243568\n",
      "Iteration 14, loss = 2.07153306\n",
      "Iteration 15, loss = 1.96935423\n",
      "Iteration 16, loss = 1.87888764\n",
      "Iteration 17, loss = 1.79683447\n",
      "Iteration 18, loss = 1.72162773\n",
      "Iteration 19, loss = 1.65414400\n",
      "Iteration 20, loss = 1.59763323\n",
      "Iteration 21, loss = 1.54718565\n",
      "Iteration 22, loss = 1.49585222\n",
      "Iteration 23, loss = 1.45110878\n",
      "Iteration 24, loss = 1.40843608\n",
      "Iteration 25, loss = 1.37798407\n",
      "Iteration 26, loss = 1.34614424\n",
      "Iteration 27, loss = 1.31370288\n",
      "Iteration 28, loss = 1.28771361\n",
      "Iteration 29, loss = 1.25973300\n",
      "Iteration 30, loss = 1.23392867\n",
      "Iteration 31, loss = 1.21534905\n",
      "Iteration 32, loss = 1.18534285\n",
      "Iteration 33, loss = 1.17234078\n",
      "Iteration 34, loss = 1.15451296\n",
      "Iteration 35, loss = 1.13628348\n",
      "Iteration 36, loss = 1.12817093\n",
      "Iteration 37, loss = 1.10820262\n",
      "Iteration 38, loss = 1.09055899\n",
      "Iteration 39, loss = 1.08062564\n",
      "Iteration 40, loss = 1.07004874\n",
      "Iteration 41, loss = 1.04697322\n",
      "Iteration 42, loss = 1.04660969\n",
      "Iteration 43, loss = 1.03271110\n",
      "Iteration 44, loss = 1.02027268\n",
      "Iteration 45, loss = 1.01390422\n",
      "Iteration 46, loss = 1.00067184\n",
      "Iteration 47, loss = 0.99136096\n",
      "Iteration 48, loss = 0.97838636\n",
      "Iteration 49, loss = 0.96976422\n",
      "Iteration 50, loss = 0.95590480\n",
      "Iteration 51, loss = 0.95430302\n",
      "Iteration 52, loss = 0.95652904\n",
      "Iteration 53, loss = 0.94116491\n",
      "Iteration 54, loss = 0.93815520\n",
      "Iteration 55, loss = 0.93693348\n",
      "Iteration 56, loss = 0.92196395\n",
      "Iteration 57, loss = 0.91878233\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iteration 58, loss = 0.91634115\n",
      "Iteration 59, loss = 0.90551673\n",
      "Iteration 60, loss = 0.89930984\n",
      "Iteration 61, loss = 0.89920424\n",
      "Iteration 62, loss = 0.87999756\n",
      "Iteration 63, loss = 0.87972875\n",
      "Iteration 64, loss = 0.87410644\n",
      "Iteration 65, loss = 0.86504080\n",
      "Iteration 66, loss = 0.86589629\n",
      "Iteration 67, loss = 0.86014765\n",
      "Iteration 68, loss = 0.84639740\n",
      "Iteration 69, loss = 0.84162425\n",
      "Iteration 70, loss = 0.84093349\n",
      "Iteration 71, loss = 0.83789159\n",
      "Iteration 72, loss = 0.83234191\n",
      "Iteration 73, loss = 0.82924582\n",
      "Iteration 74, loss = 0.81914175\n",
      "Iteration 75, loss = 0.82591192\n",
      "Iteration 76, loss = 0.81038558\n",
      "Iteration 77, loss = 0.81879055\n",
      "Iteration 78, loss = 0.80458195\n",
      "Iteration 79, loss = 0.79960194\n",
      "Iteration 80, loss = 0.79935370\n",
      "Iteration 81, loss = 0.79247280\n",
      "Iteration 82, loss = 0.78456070\n",
      "Iteration 83, loss = 0.78638129\n",
      "Iteration 84, loss = 0.78489983\n",
      "Iteration 85, loss = 0.77953953\n",
      "Iteration 86, loss = 0.77252009\n",
      "Iteration 87, loss = 0.77071103\n",
      "Iteration 88, loss = 0.76628943\n",
      "Iteration 89, loss = 0.76242712\n",
      "Iteration 90, loss = 0.76000492\n",
      "Iteration 91, loss = 0.75727940\n",
      "Iteration 92, loss = 0.74989386\n",
      "Iteration 93, loss = 0.74920547\n",
      "Iteration 94, loss = 0.74939661\n",
      "Iteration 95, loss = 0.74886581\n",
      "Iteration 96, loss = 0.74849946\n",
      "Iteration 97, loss = 0.75654770\n",
      "Iteration 98, loss = 0.73753926\n",
      "Iteration 99, loss = 0.73537851\n",
      "Iteration 100, loss = 0.74054136\n",
      "Iteration 101, loss = 0.72497783\n",
      "Iteration 102, loss = 0.72619551\n",
      "Iteration 103, loss = 0.71574585\n",
      "Iteration 104, loss = 0.71917230\n",
      "Iteration 105, loss = 0.71873451\n",
      "Iteration 106, loss = 0.71755877\n",
      "Iteration 107, loss = 0.70487045\n",
      "Iteration 108, loss = 0.70271404\n",
      "Iteration 109, loss = 0.70204777\n",
      "Iteration 110, loss = 0.69600454\n",
      "Iteration 111, loss = 0.69997809\n",
      "Iteration 112, loss = 0.69272039\n",
      "Iteration 113, loss = 0.68875442\n",
      "Iteration 114, loss = 0.68650166\n",
      "Iteration 115, loss = 0.68965885\n",
      "Iteration 116, loss = 0.68347722\n",
      "Iteration 117, loss = 0.68426749\n",
      "Iteration 118, loss = 0.67479312\n",
      "Iteration 119, loss = 0.67813150\n",
      "Iteration 120, loss = 0.67184540\n",
      "Iteration 121, loss = 0.67485057\n",
      "Iteration 122, loss = 0.67119174\n",
      "Iteration 123, loss = 0.66684071\n",
      "Iteration 124, loss = 0.66271057\n",
      "Iteration 125, loss = 0.66399732\n",
      "Iteration 126, loss = 0.66179880\n",
      "Iteration 127, loss = 0.66295005\n",
      "Iteration 128, loss = 0.66137765\n",
      "Iteration 129, loss = 0.65033268\n",
      "Iteration 130, loss = 0.65552565\n",
      "Iteration 131, loss = 0.65241579\n",
      "Iteration 132, loss = 0.65021601\n",
      "Iteration 133, loss = 0.64801025\n",
      "Iteration 134, loss = 0.63909659\n",
      "Iteration 135, loss = 0.63637718\n",
      "Iteration 136, loss = 0.63935636\n",
      "Iteration 137, loss = 0.63764456\n",
      "Iteration 138, loss = 0.66608166\n",
      "Iteration 139, loss = 0.64712801\n",
      "Iteration 140, loss = 0.63665974\n",
      "Iteration 141, loss = 0.63100616\n",
      "Iteration 142, loss = 0.63803546\n",
      "Iteration 143, loss = 0.62818945\n",
      "Iteration 144, loss = 0.63652541\n",
      "Iteration 145, loss = 0.62157816\n",
      "Iteration 146, loss = 0.63330618\n",
      "Iteration 147, loss = 0.62244650\n",
      "Iteration 148, loss = 0.62668141\n",
      "Iteration 149, loss = 0.61615667\n",
      "Iteration 150, loss = 0.62224079\n",
      "Iteration 151, loss = 0.61694303\n",
      "Iteration 152, loss = 0.62266164\n",
      "Iteration 153, loss = 0.61443690\n",
      "Iteration 154, loss = 0.61495923\n",
      "Iteration 155, loss = 0.61428304\n",
      "Iteration 156, loss = 0.60702170\n",
      "Iteration 157, loss = 0.60948334\n",
      "Iteration 158, loss = 0.60201181\n",
      "Iteration 159, loss = 0.60554101\n",
      "Iteration 160, loss = 0.59955723\n",
      "Iteration 161, loss = 0.59625552\n",
      "Iteration 162, loss = 0.60376386\n",
      "Iteration 163, loss = 0.59497645\n",
      "Iteration 164, loss = 0.59366751\n",
      "Iteration 165, loss = 0.59831084\n",
      "Iteration 166, loss = 0.58971230\n",
      "Iteration 167, loss = 0.58666394\n",
      "Iteration 168, loss = 0.59130740\n",
      "Iteration 169, loss = 0.59703895\n",
      "Iteration 170, loss = 0.58624624\n",
      "Iteration 171, loss = 0.58735044\n",
      "Iteration 172, loss = 0.57409751\n",
      "Iteration 173, loss = 0.58372918\n",
      "Iteration 174, loss = 0.57814894\n",
      "Iteration 175, loss = 0.57654497\n",
      "Iteration 176, loss = 0.57319673\n",
      "Iteration 177, loss = 0.57123799\n",
      "Iteration 178, loss = 0.57955215\n",
      "Iteration 179, loss = 0.57666754\n",
      "Iteration 180, loss = 0.56873268\n",
      "Iteration 181, loss = 0.56966603\n",
      "Iteration 182, loss = 0.56788275\n",
      "Iteration 183, loss = 0.56505901\n",
      "Iteration 184, loss = 0.56796116\n",
      "Iteration 185, loss = 0.57061426\n",
      "Iteration 186, loss = 0.56414912\n",
      "Iteration 187, loss = 0.56969018\n",
      "Iteration 188, loss = 0.56136835\n",
      "Iteration 189, loss = 0.56868003\n",
      "Iteration 190, loss = 0.56307664\n",
      "Iteration 191, loss = 0.55831580\n",
      "Iteration 192, loss = 0.55474516\n",
      "Iteration 193, loss = 0.55187798\n",
      "Iteration 194, loss = 0.54957015\n",
      "Iteration 195, loss = 0.55063157\n",
      "Iteration 196, loss = 0.55553429\n",
      "Iteration 197, loss = 0.55002370\n",
      "Iteration 198, loss = 0.54420872\n",
      "Iteration 199, loss = 0.54852194\n",
      "Iteration 200, loss = 0.55013041\n",
      "Iteration 201, loss = 0.54270491\n",
      "Iteration 202, loss = 0.54506759\n",
      "Iteration 203, loss = 0.55151185\n",
      "Iteration 204, loss = 0.53855605\n",
      "Iteration 205, loss = 0.54368254\n",
      "Iteration 206, loss = 0.54020208\n",
      "Iteration 207, loss = 0.54136764\n",
      "Iteration 208, loss = 0.53872973\n",
      "Iteration 209, loss = 0.54802610\n",
      "Iteration 210, loss = 0.53983954\n",
      "Iteration 211, loss = 0.53681110\n",
      "Iteration 212, loss = 0.53530491\n",
      "Iteration 213, loss = 0.53781012\n",
      "Iteration 214, loss = 0.53519524\n",
      "Iteration 215, loss = 0.53485419\n",
      "Iteration 216, loss = 0.53658796\n",
      "Iteration 217, loss = 0.53027312\n",
      "Iteration 218, loss = 0.53366425\n",
      "Iteration 219, loss = 0.52714751\n",
      "Iteration 220, loss = 0.52942952\n",
      "Iteration 221, loss = 0.52280133\n",
      "Iteration 222, loss = 0.52901953\n",
      "Iteration 223, loss = 0.52899364\n",
      "Iteration 224, loss = 0.52287695\n",
      "Iteration 225, loss = 0.52713614\n",
      "Iteration 226, loss = 0.51639508\n",
      "Iteration 227, loss = 0.51970363\n",
      "Iteration 228, loss = 0.51520646\n",
      "Iteration 229, loss = 0.51579917\n",
      "Iteration 230, loss = 0.52138315\n",
      "Iteration 231, loss = 0.51511518\n",
      "Iteration 232, loss = 0.51895494\n",
      "Iteration 233, loss = 0.51761032\n",
      "Iteration 234, loss = 0.51878466\n",
      "Iteration 235, loss = 0.51022906\n",
      "Iteration 236, loss = 0.51061007\n",
      "Iteration 237, loss = 0.51413603\n",
      "Iteration 238, loss = 0.50447438\n",
      "Iteration 239, loss = 0.50751842\n",
      "Iteration 240, loss = 0.50639813\n",
      "Iteration 241, loss = 0.51058829\n",
      "Iteration 242, loss = 0.50835349\n",
      "Iteration 243, loss = 0.50578086\n",
      "Iteration 244, loss = 0.50282703\n",
      "Iteration 245, loss = 0.50449349\n",
      "Iteration 246, loss = 0.50846576\n",
      "Iteration 247, loss = 0.50742766\n",
      "Iteration 248, loss = 0.51205821\n",
      "Iteration 249, loss = 0.50821264\n",
      "Iteration 250, loss = 0.50264736\n",
      "Iteration 251, loss = 0.50083937\n",
      "Iteration 252, loss = 0.50110062\n",
      "Iteration 253, loss = 0.50542786\n",
      "Iteration 254, loss = 0.52601514\n",
      "Iteration 255, loss = 0.50041325\n",
      "Iteration 256, loss = 0.50359284\n",
      "Iteration 257, loss = 0.50059714\n",
      "Iteration 258, loss = 0.50568739\n",
      "Iteration 259, loss = 0.50919820\n",
      "Iteration 260, loss = 0.49658882\n",
      "Iteration 261, loss = 0.49629479\n",
      "Iteration 262, loss = 0.49157258\n",
      "Iteration 263, loss = 0.49382549\n",
      "Iteration 264, loss = 0.48871539\n",
      "Iteration 265, loss = 0.49069938\n",
      "Iteration 266, loss = 0.48727851\n",
      "Iteration 267, loss = 0.49298756\n",
      "Iteration 268, loss = 0.50067546\n",
      "Iteration 269, loss = 0.48914248\n",
      "Iteration 270, loss = 0.48793991\n",
      "Iteration 271, loss = 0.48358628\n",
      "Iteration 272, loss = 0.49674394\n",
      "Iteration 273, loss = 0.48476729\n",
      "Iteration 274, loss = 0.48381238\n",
      "Iteration 275, loss = 0.47865387\n",
      "Iteration 276, loss = 0.48980879\n",
      "Iteration 277, loss = 0.48493350\n",
      "Iteration 278, loss = 0.48487256\n",
      "Iteration 279, loss = 0.47883018\n",
      "Iteration 280, loss = 0.48304123\n",
      "Iteration 281, loss = 0.48892226\n",
      "Iteration 282, loss = 0.48021008\n",
      "Iteration 283, loss = 0.47583181\n",
      "Iteration 284, loss = 0.47726690\n",
      "Iteration 285, loss = 0.47601607\n",
      "Iteration 286, loss = 0.48175527\n",
      "Iteration 287, loss = 0.47287184\n",
      "Iteration 288, loss = 0.47912945\n",
      "Iteration 289, loss = 0.47858589\n",
      "Iteration 290, loss = 0.47161549\n",
      "Iteration 291, loss = 0.47542378\n",
      "Iteration 292, loss = 0.47798282\n",
      "Iteration 293, loss = 0.47785520\n",
      "Iteration 294, loss = 0.46829503\n",
      "Iteration 295, loss = 0.47543147\n",
      "Iteration 296, loss = 0.46901931\n",
      "Iteration 297, loss = 0.46391578\n",
      "Iteration 298, loss = 0.46969909\n",
      "Iteration 299, loss = 0.48499385\n",
      "Iteration 300, loss = 0.46924759\n",
      "Iteration 301, loss = 0.46647759\n",
      "Iteration 302, loss = 0.46612342\n",
      "Iteration 303, loss = 0.46995548\n",
      "Iteration 304, loss = 0.47100537\n",
      "Iteration 305, loss = 0.47024643\n",
      "Iteration 306, loss = 0.46498941\n",
      "Iteration 307, loss = 0.46258765\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iteration 308, loss = 0.46045905\n",
      "Iteration 309, loss = 0.46784342\n",
      "Iteration 310, loss = 0.46047940\n",
      "Iteration 311, loss = 0.48445163\n",
      "Iteration 312, loss = 0.46866826\n",
      "Iteration 313, loss = 0.46034714\n",
      "Iteration 314, loss = 0.46884555\n",
      "Iteration 315, loss = 0.45894976\n",
      "Iteration 316, loss = 0.46465525\n",
      "Iteration 317, loss = 0.45794174\n",
      "Iteration 318, loss = 0.45868396\n",
      "Iteration 319, loss = 0.46205150\n",
      "Iteration 320, loss = 0.45146460\n",
      "Iteration 321, loss = 0.45548370\n",
      "Iteration 322, loss = 0.45638859\n",
      "Iteration 323, loss = 0.45453247\n",
      "Iteration 324, loss = 0.45555589\n",
      "Iteration 325, loss = 0.45733608\n",
      "Iteration 326, loss = 0.46328190\n",
      "Iteration 327, loss = 0.44899988\n",
      "Iteration 328, loss = 0.45366350\n",
      "Iteration 329, loss = 0.46042137\n",
      "Iteration 330, loss = 0.45028575\n",
      "Iteration 331, loss = 0.45339808\n",
      "Iteration 332, loss = 0.45677618\n",
      "Iteration 333, loss = 0.44931638\n",
      "Iteration 334, loss = 0.44893946\n",
      "Iteration 335, loss = 0.45157843\n",
      "Iteration 336, loss = 0.45616874\n",
      "Iteration 337, loss = 0.44571081\n",
      "Iteration 338, loss = 0.44261588\n",
      "Iteration 339, loss = 0.44144179\n",
      "Iteration 340, loss = 0.44722988\n",
      "Iteration 341, loss = 0.44244541\n",
      "Iteration 342, loss = 0.45345165\n",
      "Iteration 343, loss = 0.44733506\n",
      "Iteration 344, loss = 0.44697622\n",
      "Iteration 345, loss = 0.45122878\n",
      "Iteration 346, loss = 0.45476163\n",
      "Iteration 347, loss = 0.44175941\n",
      "Iteration 348, loss = 0.44654983\n",
      "Iteration 349, loss = 0.44795201\n",
      "Iteration 350, loss = 0.44510182\n",
      "Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
      "2.586675792057196\n",
      "Iteration 1, loss = 17.34025145\n",
      "Iteration 2, loss = 4.90626032\n",
      "Iteration 3, loss = 4.05139765\n",
      "Iteration 4, loss = 3.52947287\n",
      "Iteration 5, loss = 3.21859409\n",
      "Iteration 6, loss = 3.01405682\n",
      "Iteration 7, loss = 2.85491127\n",
      "Iteration 8, loss = 2.72479432\n",
      "Iteration 9, loss = 2.60795878\n",
      "Iteration 10, loss = 2.49975777\n",
      "Iteration 11, loss = 2.38337503\n",
      "Iteration 12, loss = 2.27622934\n",
      "Iteration 13, loss = 2.17416040\n",
      "Iteration 14, loss = 2.06874902\n",
      "Iteration 15, loss = 1.97245984\n",
      "Iteration 16, loss = 1.89023036\n",
      "Iteration 17, loss = 1.80400696\n",
      "Iteration 18, loss = 1.74282944\n",
      "Iteration 19, loss = 1.67128542\n",
      "Iteration 20, loss = 1.61478322\n",
      "Iteration 21, loss = 1.56628709\n",
      "Iteration 22, loss = 1.51968321\n",
      "Iteration 23, loss = 1.47992622\n",
      "Iteration 24, loss = 1.43410473\n",
      "Iteration 25, loss = 1.40487489\n",
      "Iteration 26, loss = 1.36753344\n",
      "Iteration 27, loss = 1.33075625\n",
      "Iteration 28, loss = 1.30785772\n",
      "Iteration 29, loss = 1.28025945\n",
      "Iteration 30, loss = 1.25420218\n",
      "Iteration 31, loss = 1.22925899\n",
      "Iteration 32, loss = 1.20571028\n",
      "Iteration 33, loss = 1.18614067\n",
      "Iteration 34, loss = 1.17384810\n",
      "Iteration 35, loss = 1.14473879\n",
      "Iteration 36, loss = 1.13667157\n",
      "Iteration 37, loss = 1.11901628\n",
      "Iteration 38, loss = 1.10124839\n",
      "Iteration 39, loss = 1.09138126\n",
      "Iteration 40, loss = 1.07908110\n",
      "Iteration 41, loss = 1.05108854\n",
      "Iteration 42, loss = 1.04560790\n",
      "Iteration 43, loss = 1.02918784\n",
      "Iteration 44, loss = 1.01577877\n",
      "Iteration 45, loss = 1.01136174\n",
      "Iteration 46, loss = 1.00170439\n",
      "Iteration 47, loss = 0.98840995\n",
      "Iteration 48, loss = 0.97760502\n",
      "Iteration 49, loss = 0.96905504\n",
      "Iteration 50, loss = 0.95281078\n",
      "Iteration 51, loss = 0.95890675\n",
      "Iteration 52, loss = 0.93950164\n",
      "Iteration 53, loss = 0.93690024\n",
      "Iteration 54, loss = 0.93671771\n",
      "Iteration 55, loss = 0.92261400\n",
      "Iteration 56, loss = 0.91271972\n",
      "Iteration 57, loss = 0.90046465\n",
      "Iteration 58, loss = 0.90059554\n",
      "Iteration 59, loss = 0.88994521\n",
      "Iteration 60, loss = 0.88500337\n",
      "Iteration 61, loss = 0.88234021\n",
      "Iteration 62, loss = 0.87559278\n",
      "Iteration 63, loss = 0.86861205\n",
      "Iteration 64, loss = 0.85616355\n",
      "Iteration 65, loss = 0.85281466\n",
      "Iteration 66, loss = 0.84793307\n",
      "Iteration 67, loss = 0.84012228\n",
      "Iteration 68, loss = 0.83065425\n",
      "Iteration 69, loss = 0.83152876\n",
      "Iteration 70, loss = 0.82041125\n",
      "Iteration 71, loss = 0.81919521\n",
      "Iteration 72, loss = 0.81088999\n",
      "Iteration 73, loss = 0.81303291\n",
      "Iteration 74, loss = 0.80457352\n",
      "Iteration 75, loss = 0.80459531\n",
      "Iteration 76, loss = 0.79200356\n",
      "Iteration 77, loss = 0.80103425\n",
      "Iteration 78, loss = 0.78734145\n",
      "Iteration 79, loss = 0.78293118\n",
      "Iteration 80, loss = 0.78597851\n",
      "Iteration 81, loss = 0.77907035\n",
      "Iteration 82, loss = 0.76320184\n",
      "Iteration 83, loss = 0.77508125\n",
      "Iteration 84, loss = 0.76668088\n",
      "Iteration 85, loss = 0.76016401\n",
      "Iteration 86, loss = 0.75528127\n",
      "Iteration 87, loss = 0.75521228\n",
      "Iteration 88, loss = 0.75418739\n",
      "Iteration 89, loss = 0.74886637\n",
      "Iteration 90, loss = 0.74498850\n",
      "Iteration 91, loss = 0.73724876\n",
      "Iteration 92, loss = 0.72841435\n",
      "Iteration 93, loss = 0.73014906\n",
      "Iteration 94, loss = 0.73027124\n",
      "Iteration 95, loss = 0.72539055\n",
      "Iteration 96, loss = 0.71910824\n",
      "Iteration 97, loss = 0.71803950\n",
      "Iteration 98, loss = 0.71555492\n",
      "Iteration 99, loss = 0.71783465\n",
      "Iteration 100, loss = 0.71461269\n",
      "Iteration 101, loss = 0.70436335\n",
      "Iteration 102, loss = 0.70369416\n",
      "Iteration 103, loss = 0.70064283\n",
      "Iteration 104, loss = 0.69894434\n",
      "Iteration 105, loss = 0.69878477\n",
      "Iteration 106, loss = 0.69309321\n",
      "Iteration 107, loss = 0.68751884\n",
      "Iteration 108, loss = 0.68601859\n",
      "Iteration 109, loss = 0.68287118\n",
      "Iteration 110, loss = 0.68158209\n",
      "Iteration 111, loss = 0.68141198\n",
      "Iteration 112, loss = 0.67283428\n",
      "Iteration 113, loss = 0.67002870\n",
      "Iteration 114, loss = 0.66729551\n",
      "Iteration 115, loss = 0.66629075\n",
      "Iteration 116, loss = 0.66231856\n",
      "Iteration 117, loss = 0.66777261\n",
      "Iteration 118, loss = 0.65340849\n",
      "Iteration 119, loss = 0.65662900\n",
      "Iteration 120, loss = 0.65064615\n",
      "Iteration 121, loss = 0.64829149\n",
      "Iteration 122, loss = 0.65120028\n",
      "Iteration 123, loss = 0.64500051\n",
      "Iteration 124, loss = 0.64792695\n",
      "Iteration 125, loss = 0.64247252\n",
      "Iteration 126, loss = 0.64192002\n",
      "Iteration 127, loss = 0.64590486\n",
      "Iteration 128, loss = 0.63908609\n",
      "Iteration 129, loss = 0.63339038\n",
      "Iteration 130, loss = 0.63752327\n",
      "Iteration 131, loss = 0.62946497\n",
      "Iteration 132, loss = 0.62898902\n",
      "Iteration 133, loss = 0.63023418\n",
      "Iteration 134, loss = 0.62053970\n",
      "Iteration 135, loss = 0.62109261\n",
      "Iteration 136, loss = 0.61863777\n",
      "Iteration 137, loss = 0.61654223\n",
      "Iteration 138, loss = 0.61798914\n",
      "Iteration 139, loss = 0.62220151\n",
      "Iteration 140, loss = 0.61533597\n",
      "Iteration 141, loss = 0.61210838\n",
      "Iteration 142, loss = 0.61702968\n",
      "Iteration 143, loss = 0.60472590\n",
      "Iteration 144, loss = 0.61443623\n",
      "Iteration 145, loss = 0.60206242\n",
      "Iteration 146, loss = 0.60600562\n",
      "Iteration 147, loss = 0.59539921\n",
      "Iteration 148, loss = 0.60226660\n",
      "Iteration 149, loss = 0.59479365\n",
      "Iteration 150, loss = 0.59638037\n",
      "Iteration 151, loss = 0.60176639\n",
      "Iteration 152, loss = 0.59319791\n",
      "Iteration 153, loss = 0.59176011\n",
      "Iteration 154, loss = 0.59253541\n",
      "Iteration 155, loss = 0.58992687\n",
      "Iteration 156, loss = 0.58453385\n",
      "Iteration 157, loss = 0.58526222\n",
      "Iteration 158, loss = 0.57744543\n",
      "Iteration 159, loss = 0.58276703\n",
      "Iteration 160, loss = 0.57916939\n",
      "Iteration 161, loss = 0.57545981\n",
      "Iteration 162, loss = 0.58037464\n",
      "Iteration 163, loss = 0.57370835\n",
      "Iteration 164, loss = 0.57389857\n",
      "Iteration 165, loss = 0.56993285\n",
      "Iteration 166, loss = 0.57704025\n",
      "Iteration 167, loss = 0.56655262\n",
      "Iteration 168, loss = 0.56955127\n",
      "Iteration 169, loss = 0.57365825\n",
      "Iteration 170, loss = 0.56806988\n",
      "Iteration 171, loss = 0.56850870\n",
      "Iteration 172, loss = 0.55474140\n",
      "Iteration 173, loss = 0.56537552\n",
      "Iteration 174, loss = 0.56068899\n",
      "Iteration 175, loss = 0.55895105\n",
      "Iteration 176, loss = 0.55576868\n",
      "Iteration 177, loss = 0.55400442\n",
      "Iteration 178, loss = 0.55225816\n",
      "Iteration 179, loss = 0.55322223\n",
      "Iteration 180, loss = 0.55016721\n",
      "Iteration 181, loss = 0.54724136\n",
      "Iteration 182, loss = 0.55120389\n",
      "Iteration 183, loss = 0.54891534\n",
      "Iteration 184, loss = 0.56026750\n",
      "Iteration 185, loss = 0.55560767\n",
      "Iteration 186, loss = 0.54463106\n",
      "Iteration 187, loss = 0.55223442\n",
      "Iteration 188, loss = 0.54563018\n",
      "Iteration 189, loss = 0.54425379\n",
      "Iteration 190, loss = 0.53964891\n",
      "Iteration 191, loss = 0.53876857\n",
      "Iteration 192, loss = 0.53964591\n",
      "Iteration 193, loss = 0.53260603\n",
      "Iteration 194, loss = 0.54016583\n",
      "Iteration 195, loss = 0.53912426\n",
      "Iteration 196, loss = 0.53908446\n",
      "Iteration 197, loss = 0.52964405\n",
      "Iteration 198, loss = 0.52661611\n",
      "Iteration 199, loss = 0.52786622\n",
      "Iteration 200, loss = 0.52975017\n",
      "Iteration 201, loss = 0.52181899\n",
      "Iteration 202, loss = 0.52592366\n",
      "Iteration 203, loss = 0.53658770\n",
      "Iteration 204, loss = 0.52930689\n",
      "Iteration 205, loss = 0.52406989\n",
      "Iteration 206, loss = 0.51940069\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iteration 207, loss = 0.51761753\n",
      "Iteration 208, loss = 0.51807125\n",
      "Iteration 209, loss = 0.51941113\n",
      "Iteration 210, loss = 0.51828376\n",
      "Iteration 211, loss = 0.51945198\n",
      "Iteration 212, loss = 0.52095342\n",
      "Iteration 213, loss = 0.51870307\n",
      "Iteration 214, loss = 0.51215747\n",
      "Iteration 215, loss = 0.51468486\n",
      "Iteration 216, loss = 0.51427538\n",
      "Iteration 217, loss = 0.51452748\n",
      "Iteration 218, loss = 0.51251907\n",
      "Iteration 219, loss = 0.50966559\n",
      "Iteration 220, loss = 0.50937072\n",
      "Iteration 221, loss = 0.50675846\n",
      "Iteration 222, loss = 0.51435366\n",
      "Iteration 223, loss = 0.51123864\n",
      "Iteration 224, loss = 0.50507350\n",
      "Iteration 225, loss = 0.51035979\n",
      "Iteration 226, loss = 0.50401014\n",
      "Iteration 227, loss = 0.53271902\n",
      "Iteration 228, loss = 0.50112442\n",
      "Iteration 229, loss = 0.50452591\n",
      "Iteration 230, loss = 0.50111459\n",
      "Iteration 231, loss = 0.49953102\n",
      "Iteration 232, loss = 0.50099756\n",
      "Iteration 233, loss = 0.49764533\n",
      "Iteration 234, loss = 0.50449314\n",
      "Iteration 235, loss = 0.49194755\n",
      "Iteration 236, loss = 0.49576503\n",
      "Iteration 237, loss = 0.49646591\n",
      "Iteration 238, loss = 0.49437752\n",
      "Iteration 239, loss = 0.48911105\n",
      "Iteration 240, loss = 0.49343930\n",
      "Iteration 241, loss = 0.49291107\n",
      "Iteration 242, loss = 0.49164523\n",
      "Iteration 243, loss = 0.48792884\n",
      "Iteration 244, loss = 0.48557548\n",
      "Iteration 245, loss = 0.48448265\n",
      "Iteration 246, loss = 0.49100173\n",
      "Iteration 247, loss = 0.48955656\n",
      "Iteration 248, loss = 0.48693002\n",
      "Iteration 249, loss = 0.48664343\n",
      "Iteration 250, loss = 0.48436837\n",
      "Iteration 251, loss = 0.48858670\n",
      "Iteration 252, loss = 0.48507687\n",
      "Iteration 253, loss = 0.48995479\n",
      "Iteration 254, loss = 0.48985661\n",
      "Iteration 255, loss = 0.47947045\n",
      "Iteration 256, loss = 0.48885938\n",
      "Iteration 257, loss = 0.47785578\n",
      "Iteration 258, loss = 0.48541786\n",
      "Iteration 259, loss = 0.48655453\n",
      "Iteration 260, loss = 0.48167195\n",
      "Iteration 261, loss = 0.47717418\n",
      "Iteration 262, loss = 0.47846535\n",
      "Iteration 263, loss = 0.47682619\n",
      "Iteration 264, loss = 0.46802530\n",
      "Iteration 265, loss = 0.47540109\n",
      "Iteration 266, loss = 0.47297883\n",
      "Iteration 267, loss = 0.47064949\n",
      "Iteration 268, loss = 0.47771609\n",
      "Iteration 269, loss = 0.47588217\n",
      "Iteration 270, loss = 0.47047543\n",
      "Iteration 271, loss = 0.46349873\n",
      "Iteration 272, loss = 0.47632177\n",
      "Iteration 273, loss = 0.47191503\n",
      "Iteration 274, loss = 0.47005291\n",
      "Iteration 275, loss = 0.46250911\n",
      "Iteration 276, loss = 0.46978279\n",
      "Iteration 277, loss = 0.46841767\n",
      "Iteration 278, loss = 0.46664715\n",
      "Iteration 279, loss = 0.46447257\n",
      "Iteration 280, loss = 0.46284850\n",
      "Iteration 281, loss = 0.46758521\n",
      "Iteration 282, loss = 0.46302253\n",
      "Iteration 283, loss = 0.46460992\n",
      "Iteration 284, loss = 0.46351552\n",
      "Iteration 285, loss = 0.46118904\n",
      "Iteration 286, loss = 0.45596057\n",
      "Iteration 287, loss = 0.45599808\n",
      "Iteration 288, loss = 0.46214511\n",
      "Iteration 289, loss = 0.45994553\n",
      "Iteration 290, loss = 0.46111470\n",
      "Iteration 291, loss = 0.46116759\n",
      "Iteration 292, loss = 0.46714469\n",
      "Iteration 293, loss = 0.46203462\n",
      "Iteration 294, loss = 0.45140506\n",
      "Iteration 295, loss = 0.45576445\n",
      "Iteration 296, loss = 0.45664901\n",
      "Iteration 297, loss = 0.45017809\n",
      "Iteration 298, loss = 0.45152556\n",
      "Iteration 299, loss = 0.45956137\n",
      "Iteration 300, loss = 0.45279326\n",
      "Iteration 301, loss = 0.45868357\n",
      "Iteration 302, loss = 0.45026403\n",
      "Iteration 303, loss = 0.45255546\n",
      "Iteration 304, loss = 0.45017733\n",
      "Iteration 305, loss = 0.45997124\n",
      "Iteration 306, loss = 0.44533712\n",
      "Iteration 307, loss = 0.44810167\n",
      "Iteration 308, loss = 0.44812250\n",
      "Iteration 309, loss = 0.45027949\n",
      "Iteration 310, loss = 0.44654190\n",
      "Iteration 311, loss = 0.44975308\n",
      "Iteration 312, loss = 0.44371977\n",
      "Iteration 313, loss = 0.44118666\n",
      "Iteration 314, loss = 0.45235383\n",
      "Iteration 315, loss = 0.44584335\n",
      "Iteration 316, loss = 0.44786837\n",
      "Iteration 317, loss = 0.44612695\n",
      "Iteration 318, loss = 0.44286305\n",
      "Iteration 319, loss = 0.44448883\n",
      "Iteration 320, loss = 0.43896268\n",
      "Iteration 321, loss = 0.44082903\n",
      "Iteration 322, loss = 0.44446217\n",
      "Iteration 323, loss = 0.44018750\n",
      "Iteration 324, loss = 0.44057899\n",
      "Iteration 325, loss = 0.43952085\n",
      "Iteration 326, loss = 0.44145112\n",
      "Iteration 327, loss = 0.43802659\n",
      "Iteration 328, loss = 0.43571194\n",
      "Iteration 329, loss = 0.46250035\n",
      "Iteration 330, loss = 0.44130046\n",
      "Iteration 331, loss = 0.43511688\n",
      "Iteration 332, loss = 0.44319759\n",
      "Iteration 333, loss = 0.43171219\n",
      "Iteration 334, loss = 0.43194073\n",
      "Iteration 335, loss = 0.43552027\n",
      "Iteration 336, loss = 0.44164452\n",
      "Iteration 337, loss = 0.43630894\n",
      "Iteration 338, loss = 0.43340602\n",
      "Iteration 339, loss = 0.43036876\n",
      "Iteration 340, loss = 0.43594135\n",
      "Iteration 341, loss = 0.43023044\n",
      "Iteration 342, loss = 0.44071595\n",
      "Iteration 343, loss = 0.43207425\n",
      "Iteration 344, loss = 0.43187530\n",
      "Iteration 345, loss = 0.43474766\n",
      "Iteration 346, loss = 0.43720862\n",
      "Iteration 347, loss = 0.42647494\n",
      "Iteration 348, loss = 0.43504953\n",
      "Iteration 349, loss = 0.43679127\n",
      "Iteration 350, loss = 0.42752651\n",
      "Iteration 351, loss = 0.43579392\n",
      "Iteration 352, loss = 0.42107526\n",
      "Iteration 353, loss = 0.43576518\n",
      "Iteration 354, loss = 0.43144510\n",
      "Iteration 355, loss = 0.42469400\n",
      "Iteration 356, loss = 0.43857123\n",
      "Iteration 357, loss = 0.42388555\n",
      "Iteration 358, loss = 0.42706625\n",
      "Iteration 359, loss = 0.42461360\n",
      "Iteration 360, loss = 0.42843815\n",
      "Iteration 361, loss = 0.42574497\n",
      "Iteration 362, loss = 0.42462543\n",
      "Iteration 363, loss = 0.42684508\n",
      "Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
      "2.6984459636890783\n",
      "Iteration 1, loss = 17.40942878\n",
      "Iteration 2, loss = 4.92222315\n",
      "Iteration 3, loss = 4.08110693\n",
      "Iteration 4, loss = 3.55997822\n",
      "Iteration 5, loss = 3.25400250\n",
      "Iteration 6, loss = 3.05477974\n",
      "Iteration 7, loss = 2.89931533\n",
      "Iteration 8, loss = 2.76982401\n",
      "Iteration 9, loss = 2.65406070\n",
      "Iteration 10, loss = 2.54177068\n",
      "Iteration 11, loss = 2.42157955\n",
      "Iteration 12, loss = 2.31426468\n",
      "Iteration 13, loss = 2.21070915\n",
      "Iteration 14, loss = 2.10357635\n",
      "Iteration 15, loss = 2.00553642\n",
      "Iteration 16, loss = 1.91586504\n",
      "Iteration 17, loss = 1.82993497\n",
      "Iteration 18, loss = 1.76056252\n",
      "Iteration 19, loss = 1.68743758\n",
      "Iteration 20, loss = 1.63015050\n",
      "Iteration 21, loss = 1.58357941\n",
      "Iteration 22, loss = 1.53284863\n",
      "Iteration 23, loss = 1.49792771\n",
      "Iteration 24, loss = 1.44950688\n",
      "Iteration 25, loss = 1.41597243\n",
      "Iteration 26, loss = 1.38200410\n",
      "Iteration 27, loss = 1.34906352\n",
      "Iteration 28, loss = 1.32995809\n",
      "Iteration 29, loss = 1.30076237\n",
      "Iteration 30, loss = 1.27347885\n",
      "Iteration 31, loss = 1.24986811\n",
      "Iteration 32, loss = 1.22723954\n",
      "Iteration 33, loss = 1.20175673\n",
      "Iteration 34, loss = 1.19461629\n",
      "Iteration 35, loss = 1.16715686\n",
      "Iteration 36, loss = 1.15723071\n",
      "Iteration 37, loss = 1.14239899\n",
      "Iteration 38, loss = 1.12387128\n",
      "Iteration 39, loss = 1.11177862\n",
      "Iteration 40, loss = 1.09040885\n",
      "Iteration 41, loss = 1.07613334\n",
      "Iteration 42, loss = 1.06242359\n",
      "Iteration 43, loss = 1.05562966\n",
      "Iteration 44, loss = 1.03148604\n",
      "Iteration 45, loss = 1.02878991\n",
      "Iteration 46, loss = 1.01935727\n",
      "Iteration 47, loss = 1.01505231\n",
      "Iteration 48, loss = 0.99381934\n",
      "Iteration 49, loss = 0.99428418\n",
      "Iteration 50, loss = 0.97005526\n",
      "Iteration 51, loss = 0.97349982\n",
      "Iteration 52, loss = 0.95750400\n",
      "Iteration 53, loss = 0.95634200\n",
      "Iteration 54, loss = 0.95414979\n",
      "Iteration 55, loss = 0.93803502\n",
      "Iteration 56, loss = 0.92906989\n",
      "Iteration 57, loss = 0.91572088\n",
      "Iteration 58, loss = 0.92210492\n",
      "Iteration 59, loss = 0.90541391\n",
      "Iteration 60, loss = 0.90594895\n",
      "Iteration 61, loss = 0.89814738\n",
      "Iteration 62, loss = 0.88940088\n",
      "Iteration 63, loss = 0.87975306\n",
      "Iteration 64, loss = 0.87287174\n",
      "Iteration 65, loss = 0.87395691\n",
      "Iteration 66, loss = 0.86439308\n",
      "Iteration 67, loss = 0.85498450\n",
      "Iteration 68, loss = 0.85213142\n",
      "Iteration 69, loss = 0.84032112\n",
      "Iteration 70, loss = 0.83248252\n",
      "Iteration 71, loss = 0.83753074\n",
      "Iteration 72, loss = 0.84947543\n",
      "Iteration 73, loss = 0.83697458\n",
      "Iteration 74, loss = 0.81510441\n",
      "Iteration 75, loss = 0.81849403\n",
      "Iteration 76, loss = 0.80688418\n",
      "Iteration 77, loss = 0.81147859\n",
      "Iteration 78, loss = 0.80255525\n",
      "Iteration 79, loss = 0.79769932\n",
      "Iteration 80, loss = 0.79986943\n",
      "Iteration 81, loss = 0.79201625\n",
      "Iteration 82, loss = 0.77831710\n",
      "Iteration 83, loss = 0.78603139\n",
      "Iteration 84, loss = 0.77759546\n",
      "Iteration 85, loss = 0.77702343\n",
      "Iteration 86, loss = 0.76888212\n",
      "Iteration 87, loss = 0.77311631\n",
      "Iteration 88, loss = 0.76909783\n",
      "Iteration 89, loss = 0.76676154\n",
      "Iteration 90, loss = 0.77215783\n",
      "Iteration 91, loss = 0.75585947\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iteration 92, loss = 0.74856205\n",
      "Iteration 93, loss = 0.75969106\n",
      "Iteration 94, loss = 0.74187512\n",
      "Iteration 95, loss = 0.74475715\n",
      "Iteration 96, loss = 0.73914678\n",
      "Iteration 97, loss = 0.73437609\n",
      "Iteration 98, loss = 0.74751397\n",
      "Iteration 99, loss = 0.74085834\n",
      "Iteration 100, loss = 0.73626545\n",
      "Iteration 101, loss = 0.72078485\n",
      "Iteration 102, loss = 0.72266243\n",
      "Iteration 103, loss = 0.72269833\n",
      "Iteration 104, loss = 0.71366030\n",
      "Iteration 105, loss = 0.72200919\n",
      "Iteration 106, loss = 0.71898968\n",
      "Iteration 107, loss = 0.70453113\n",
      "Iteration 108, loss = 0.70855425\n",
      "Iteration 109, loss = 0.70307211\n",
      "Iteration 110, loss = 0.69852784\n",
      "Iteration 111, loss = 0.70646914\n",
      "Iteration 112, loss = 0.69372035\n",
      "Iteration 113, loss = 0.69155067\n",
      "Iteration 114, loss = 0.68599914\n",
      "Iteration 115, loss = 0.68919046\n",
      "Iteration 116, loss = 0.70088019\n",
      "Iteration 117, loss = 0.69229784\n",
      "Iteration 118, loss = 0.68108989\n",
      "Iteration 119, loss = 0.67868829\n",
      "Iteration 120, loss = 0.67878283\n",
      "Iteration 121, loss = 0.66882706\n",
      "Iteration 122, loss = 0.67527443\n",
      "Iteration 123, loss = 0.66881144\n",
      "Iteration 124, loss = 0.67165698\n",
      "Iteration 125, loss = 0.66428260\n",
      "Iteration 126, loss = 0.66548342\n",
      "Iteration 127, loss = 0.67278408\n",
      "Iteration 128, loss = 0.65714446\n",
      "Iteration 129, loss = 0.65241784\n",
      "Iteration 130, loss = 0.66087501\n",
      "Iteration 131, loss = 0.65783098\n",
      "Iteration 132, loss = 0.65507114\n",
      "Iteration 133, loss = 0.65489301\n",
      "Iteration 134, loss = 0.64685585\n",
      "Iteration 135, loss = 0.64727064\n",
      "Iteration 136, loss = 0.63987336\n",
      "Iteration 137, loss = 0.63843500\n",
      "Iteration 138, loss = 0.64288779\n",
      "Iteration 139, loss = 0.64752103\n",
      "Iteration 140, loss = 0.63878634\n",
      "Iteration 141, loss = 0.64460886\n",
      "Iteration 142, loss = 0.64154871\n",
      "Iteration 143, loss = 0.62452059\n",
      "Iteration 144, loss = 0.64103206\n",
      "Iteration 145, loss = 0.63098461\n",
      "Iteration 146, loss = 0.63989142\n",
      "Iteration 147, loss = 0.62560032\n",
      "Iteration 148, loss = 0.62926734\n",
      "Iteration 149, loss = 0.61989595\n",
      "Iteration 150, loss = 0.62479913\n",
      "Iteration 151, loss = 0.62518050\n",
      "Iteration 152, loss = 0.61719630\n",
      "Iteration 153, loss = 0.61905018\n",
      "Iteration 154, loss = 0.62396245\n",
      "Iteration 155, loss = 0.61463208\n",
      "Iteration 156, loss = 0.61714946\n",
      "Iteration 157, loss = 0.61360185\n",
      "Iteration 158, loss = 0.60063536\n",
      "Iteration 159, loss = 0.61088209\n",
      "Iteration 160, loss = 0.61294594\n",
      "Iteration 161, loss = 0.60386737\n",
      "Iteration 162, loss = 0.60424012\n",
      "Iteration 163, loss = 0.60066108\n",
      "Iteration 164, loss = 0.60046456\n",
      "Iteration 165, loss = 0.59916405\n",
      "Iteration 166, loss = 0.59621717\n",
      "Iteration 167, loss = 0.59030077\n",
      "Iteration 168, loss = 0.58981898\n",
      "Iteration 169, loss = 0.59792872\n",
      "Iteration 170, loss = 0.58896940\n",
      "Iteration 171, loss = 0.59476569\n",
      "Iteration 172, loss = 0.58343090\n",
      "Iteration 173, loss = 0.59084196\n",
      "Iteration 174, loss = 0.58873940\n",
      "Iteration 175, loss = 0.58635326\n",
      "Iteration 176, loss = 0.57738374\n",
      "Iteration 177, loss = 0.58384571\n",
      "Iteration 178, loss = 0.58332869\n",
      "Iteration 179, loss = 0.57598088\n",
      "Iteration 180, loss = 0.57761133\n",
      "Iteration 181, loss = 0.57352255\n",
      "Iteration 182, loss = 0.57894369\n",
      "Iteration 183, loss = 0.57602270\n",
      "Iteration 184, loss = 0.57868125\n",
      "Iteration 185, loss = 0.58364663\n",
      "Iteration 186, loss = 0.57745405\n",
      "Iteration 187, loss = 0.58416711\n",
      "Iteration 188, loss = 0.56877297\n",
      "Iteration 189, loss = 0.56952713\n",
      "Iteration 190, loss = 0.57890852\n",
      "Iteration 191, loss = 0.57087550\n",
      "Iteration 192, loss = 0.56461090\n",
      "Iteration 193, loss = 0.56154896\n",
      "Iteration 194, loss = 0.56761784\n",
      "Iteration 195, loss = 0.57118630\n",
      "Iteration 196, loss = 0.56781945\n",
      "Iteration 197, loss = 0.55712757\n",
      "Iteration 198, loss = 0.55223010\n",
      "Iteration 199, loss = 0.56225676\n",
      "Iteration 200, loss = 0.55308883\n",
      "Iteration 201, loss = 0.55178961\n",
      "Iteration 202, loss = 0.55431315\n",
      "Iteration 203, loss = 0.55838923\n",
      "Iteration 204, loss = 0.55029375\n",
      "Iteration 205, loss = 0.55031208\n",
      "Iteration 206, loss = 0.55082515\n",
      "Iteration 207, loss = 0.54420776\n",
      "Iteration 208, loss = 0.54461028\n",
      "Iteration 209, loss = 0.54506029\n",
      "Iteration 210, loss = 0.54946190\n",
      "Iteration 211, loss = 0.54286189\n",
      "Iteration 212, loss = 0.54322834\n",
      "Iteration 213, loss = 0.54496332\n",
      "Iteration 214, loss = 0.53736374\n",
      "Iteration 215, loss = 0.54204646\n",
      "Iteration 216, loss = 0.54465343\n",
      "Iteration 217, loss = 0.54100338\n",
      "Iteration 218, loss = 0.53710113\n",
      "Iteration 219, loss = 0.53864055\n",
      "Iteration 220, loss = 0.53530713\n",
      "Iteration 221, loss = 0.53284222\n",
      "Iteration 222, loss = 0.53798370\n",
      "Iteration 223, loss = 0.53919020\n",
      "Iteration 224, loss = 0.53717672\n",
      "Iteration 225, loss = 0.53873144\n",
      "Iteration 226, loss = 0.53335074\n",
      "Iteration 227, loss = 0.52608975\n",
      "Iteration 228, loss = 0.52195666\n",
      "Iteration 229, loss = 0.52542219\n",
      "Iteration 230, loss = 0.52563604\n",
      "Iteration 231, loss = 0.52655365\n",
      "Iteration 232, loss = 0.52059735\n",
      "Iteration 233, loss = 0.52602590\n",
      "Iteration 234, loss = 0.53247856\n",
      "Iteration 235, loss = 0.51590677\n",
      "Iteration 236, loss = 0.52168665\n",
      "Iteration 237, loss = 0.52499473\n",
      "Iteration 238, loss = 0.52047044\n",
      "Iteration 239, loss = 0.51296127\n",
      "Iteration 240, loss = 0.52175645\n",
      "Iteration 241, loss = 0.52119834\n",
      "Iteration 242, loss = 0.51852855\n",
      "Iteration 243, loss = 0.51011281\n",
      "Iteration 244, loss = 0.50958323\n",
      "Iteration 245, loss = 0.51086622\n",
      "Iteration 246, loss = 0.51313463\n",
      "Iteration 247, loss = 0.51275735\n",
      "Iteration 248, loss = 0.51384728\n",
      "Iteration 249, loss = 0.50533865\n",
      "Iteration 250, loss = 0.50910081\n",
      "Iteration 251, loss = 0.51023625\n",
      "Iteration 252, loss = 0.50892721\n",
      "Iteration 253, loss = 0.50883218\n",
      "Iteration 254, loss = 0.52184931\n",
      "Iteration 255, loss = 0.50604863\n",
      "Iteration 256, loss = 0.51065055\n",
      "Iteration 257, loss = 0.50815042\n",
      "Iteration 258, loss = 0.51172261\n",
      "Iteration 259, loss = 0.50945386\n",
      "Iteration 260, loss = 0.50515274\n",
      "Iteration 261, loss = 0.49805776\n",
      "Iteration 262, loss = 0.50025095\n",
      "Iteration 263, loss = 0.50679232\n",
      "Iteration 264, loss = 0.49388936\n",
      "Iteration 265, loss = 0.50094313\n",
      "Iteration 266, loss = 0.49379694\n",
      "Iteration 267, loss = 0.49631545\n",
      "Iteration 268, loss = 0.49680487\n",
      "Iteration 269, loss = 0.49443973\n",
      "Iteration 270, loss = 0.49684923\n",
      "Iteration 271, loss = 0.49223185\n",
      "Iteration 272, loss = 0.50275942\n",
      "Iteration 273, loss = 0.49552245\n",
      "Iteration 274, loss = 0.49114413\n",
      "Iteration 275, loss = 0.48557799\n",
      "Iteration 276, loss = 0.49086041\n",
      "Iteration 277, loss = 0.48856302\n",
      "Iteration 278, loss = 0.50480159\n",
      "Iteration 279, loss = 0.49103240\n",
      "Iteration 280, loss = 0.49840007\n",
      "Iteration 281, loss = 0.49235793\n",
      "Iteration 282, loss = 0.48795211\n",
      "Iteration 283, loss = 0.48399375\n",
      "Iteration 284, loss = 0.48190930\n",
      "Iteration 285, loss = 0.48350509\n",
      "Iteration 286, loss = 0.47860097\n",
      "Iteration 287, loss = 0.48577942\n",
      "Iteration 288, loss = 0.47783458\n",
      "Iteration 289, loss = 0.48014899\n",
      "Iteration 290, loss = 0.47928281\n",
      "Iteration 291, loss = 0.47954995\n",
      "Iteration 292, loss = 0.47890284\n",
      "Iteration 293, loss = 0.47969035\n",
      "Iteration 294, loss = 0.46897451\n",
      "Iteration 295, loss = 0.47589301\n",
      "Iteration 296, loss = 0.47651590\n",
      "Iteration 297, loss = 0.47132196\n",
      "Iteration 298, loss = 0.47435374\n",
      "Iteration 299, loss = 0.48183485\n",
      "Iteration 300, loss = 0.47425128\n",
      "Iteration 301, loss = 0.47871073\n",
      "Iteration 302, loss = 0.47186425\n",
      "Iteration 303, loss = 0.46783209\n",
      "Iteration 304, loss = 0.46960696\n",
      "Iteration 305, loss = 0.47835242\n",
      "Iteration 306, loss = 0.47424765\n",
      "Iteration 307, loss = 0.46979262\n",
      "Iteration 308, loss = 0.46455265\n",
      "Iteration 309, loss = 0.46934882\n",
      "Iteration 310, loss = 0.46615179\n",
      "Iteration 311, loss = 0.48350776\n",
      "Iteration 312, loss = 0.46661944\n",
      "Iteration 313, loss = 0.46798128\n",
      "Iteration 314, loss = 0.47119334\n",
      "Iteration 315, loss = 0.46787824\n",
      "Iteration 316, loss = 0.46764918\n",
      "Iteration 317, loss = 0.46436985\n",
      "Iteration 318, loss = 0.46403750\n",
      "Iteration 319, loss = 0.46768188\n",
      "Iteration 320, loss = 0.46290393\n",
      "Iteration 321, loss = 0.46245012\n",
      "Iteration 322, loss = 0.46648190\n",
      "Iteration 323, loss = 0.46135389\n",
      "Iteration 324, loss = 0.46210636\n",
      "Iteration 325, loss = 0.45875950\n",
      "Iteration 326, loss = 0.46577039\n",
      "Iteration 327, loss = 0.46110491\n",
      "Iteration 328, loss = 0.44864841\n",
      "Iteration 329, loss = 0.46205668\n",
      "Iteration 330, loss = 0.45436187\n",
      "Iteration 331, loss = 0.46126458\n",
      "Iteration 332, loss = 0.47190996\n",
      "Iteration 333, loss = 0.45375848\n",
      "Iteration 334, loss = 0.45215954\n",
      "Iteration 335, loss = 0.44888370\n",
      "Iteration 336, loss = 0.45856270\n",
      "Iteration 337, loss = 0.45271412\n",
      "Iteration 338, loss = 0.44972463\n",
      "Iteration 339, loss = 0.44727262\n",
      "Iteration 340, loss = 0.45127136\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iteration 341, loss = 0.44731283\n",
      "Iteration 342, loss = 0.45344948\n",
      "Iteration 343, loss = 0.44849063\n",
      "Iteration 344, loss = 0.45372705\n",
      "Iteration 345, loss = 0.45028973\n",
      "Iteration 346, loss = 0.45782381\n",
      "Iteration 347, loss = 0.44952938\n",
      "Iteration 348, loss = 0.45508738\n",
      "Iteration 349, loss = 0.45535797\n",
      "Iteration 350, loss = 0.44831402\n",
      "Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
      "2.4299385227539156\n",
      "Iteration 1, loss = 17.41462498\n",
      "Iteration 2, loss = 4.92447222\n",
      "Iteration 3, loss = 4.08430553\n",
      "Iteration 4, loss = 3.56376990\n",
      "Iteration 5, loss = 3.25223011\n",
      "Iteration 6, loss = 3.04162495\n",
      "Iteration 7, loss = 2.89424774\n",
      "Iteration 8, loss = 2.75506806\n",
      "Iteration 9, loss = 2.64203846\n",
      "Iteration 10, loss = 2.51667165\n",
      "Iteration 11, loss = 2.39592986\n",
      "Iteration 12, loss = 2.27810289\n",
      "Iteration 13, loss = 2.16637483\n",
      "Iteration 14, loss = 2.05907638\n",
      "Iteration 15, loss = 1.96020471\n",
      "Iteration 16, loss = 1.86788029\n",
      "Iteration 17, loss = 1.78987443\n",
      "Iteration 18, loss = 1.71572468\n",
      "Iteration 19, loss = 1.65007863\n",
      "Iteration 20, loss = 1.59522220\n",
      "Iteration 21, loss = 1.54490378\n",
      "Iteration 22, loss = 1.50138665\n",
      "Iteration 23, loss = 1.46236519\n",
      "Iteration 24, loss = 1.41630778\n",
      "Iteration 25, loss = 1.38260849\n",
      "Iteration 26, loss = 1.35200833\n",
      "Iteration 27, loss = 1.31847711\n",
      "Iteration 28, loss = 1.29657017\n",
      "Iteration 29, loss = 1.27106021\n",
      "Iteration 30, loss = 1.24604733\n",
      "Iteration 31, loss = 1.21741689\n",
      "Iteration 32, loss = 1.19557600\n",
      "Iteration 33, loss = 1.17537842\n",
      "Iteration 34, loss = 1.16598275\n",
      "Iteration 35, loss = 1.13734886\n",
      "Iteration 36, loss = 1.12227873\n",
      "Iteration 37, loss = 1.10977584\n",
      "Iteration 38, loss = 1.09128228\n",
      "Iteration 39, loss = 1.08267736\n",
      "Iteration 40, loss = 1.06508742\n",
      "Iteration 41, loss = 1.05180882\n",
      "Iteration 42, loss = 1.03396710\n",
      "Iteration 43, loss = 1.02969805\n",
      "Iteration 44, loss = 1.01101441\n",
      "Iteration 45, loss = 1.00136251\n",
      "Iteration 46, loss = 0.99338233\n",
      "Iteration 47, loss = 0.99111566\n",
      "Iteration 48, loss = 0.97132608\n",
      "Iteration 49, loss = 0.97101714\n",
      "Iteration 50, loss = 0.95021031\n",
      "Iteration 51, loss = 0.95294617\n",
      "Iteration 52, loss = 0.94841138\n",
      "Iteration 53, loss = 0.94131856\n",
      "Iteration 54, loss = 0.93086107\n",
      "Iteration 55, loss = 0.91831813\n",
      "Iteration 56, loss = 0.91567868\n",
      "Iteration 57, loss = 0.89962790\n",
      "Iteration 58, loss = 0.89594583\n",
      "Iteration 59, loss = 0.89141121\n",
      "Iteration 60, loss = 0.89210827\n",
      "Iteration 61, loss = 0.88013080\n",
      "Iteration 62, loss = 0.87026194\n",
      "Iteration 63, loss = 0.86578233\n",
      "Iteration 64, loss = 0.85935280\n",
      "Iteration 65, loss = 0.85432294\n",
      "Iteration 66, loss = 0.84271062\n",
      "Iteration 67, loss = 0.84269765\n",
      "Iteration 68, loss = 0.83801801\n",
      "Iteration 69, loss = 0.82859542\n",
      "Iteration 70, loss = 0.81760946\n",
      "Iteration 71, loss = 0.82366938\n",
      "Iteration 72, loss = 0.81812086\n",
      "Iteration 73, loss = 0.82151354\n",
      "Iteration 74, loss = 0.79970892\n",
      "Iteration 75, loss = 0.80682542\n",
      "Iteration 76, loss = 0.79470175\n",
      "Iteration 77, loss = 0.79324975\n",
      "Iteration 78, loss = 0.78951104\n",
      "Iteration 79, loss = 0.78280675\n",
      "Iteration 80, loss = 0.78855074\n",
      "Iteration 81, loss = 0.78098134\n",
      "Iteration 82, loss = 0.76927741\n",
      "Iteration 83, loss = 0.77387414\n",
      "Iteration 84, loss = 0.77387859\n",
      "Iteration 85, loss = 0.76163094\n",
      "Iteration 86, loss = 0.75818901\n",
      "Iteration 87, loss = 0.76657563\n",
      "Iteration 88, loss = 0.75945875\n",
      "Iteration 89, loss = 0.75440700\n",
      "Iteration 90, loss = 0.74476044\n",
      "Iteration 91, loss = 0.73727603\n",
      "Iteration 92, loss = 0.73456300\n",
      "Iteration 93, loss = 0.73599180\n",
      "Iteration 94, loss = 0.72876858\n",
      "Iteration 95, loss = 0.72064266\n",
      "Iteration 96, loss = 0.72324366\n",
      "Iteration 97, loss = 0.71858117\n",
      "Iteration 98, loss = 0.72072479\n",
      "Iteration 99, loss = 0.72263265\n",
      "Iteration 100, loss = 0.71673754\n",
      "Iteration 101, loss = 0.70179678\n",
      "Iteration 102, loss = 0.70143068\n",
      "Iteration 103, loss = 0.69894876\n",
      "Iteration 104, loss = 0.69306394\n",
      "Iteration 105, loss = 0.70294183\n",
      "Iteration 106, loss = 0.69052786\n",
      "Iteration 107, loss = 0.68728756\n",
      "Iteration 108, loss = 0.68623370\n",
      "Iteration 109, loss = 0.68164003\n",
      "Iteration 110, loss = 0.68422173\n",
      "Iteration 111, loss = 0.68112782\n",
      "Iteration 112, loss = 0.67269670\n",
      "Iteration 113, loss = 0.67098409\n",
      "Iteration 114, loss = 0.66398592\n",
      "Iteration 115, loss = 0.66595782\n",
      "Iteration 116, loss = 0.66199140\n",
      "Iteration 117, loss = 0.66684555\n",
      "Iteration 118, loss = 0.65717391\n",
      "Iteration 119, loss = 0.66373624\n",
      "Iteration 120, loss = 0.65579056\n",
      "Iteration 121, loss = 0.64832524\n",
      "Iteration 122, loss = 0.65067406\n",
      "Iteration 123, loss = 0.64599678\n",
      "Iteration 124, loss = 0.65057381\n",
      "Iteration 125, loss = 0.63842802\n",
      "Iteration 126, loss = 0.64038937\n",
      "Iteration 127, loss = 0.64879616\n",
      "Iteration 128, loss = 0.63895026\n",
      "Iteration 129, loss = 0.62851558\n",
      "Iteration 130, loss = 0.63660626\n",
      "Iteration 131, loss = 0.64147376\n",
      "Iteration 132, loss = 0.63108105\n",
      "Iteration 133, loss = 0.63237172\n",
      "Iteration 134, loss = 0.62324609\n",
      "Iteration 135, loss = 0.62248913\n",
      "Iteration 136, loss = 0.61777104\n",
      "Iteration 137, loss = 0.61628067\n",
      "Iteration 138, loss = 0.62249532\n",
      "Iteration 139, loss = 0.61777062\n",
      "Iteration 140, loss = 0.61127171\n",
      "Iteration 141, loss = 0.61247963\n",
      "Iteration 142, loss = 0.62066364\n",
      "Iteration 143, loss = 0.60930495\n",
      "Iteration 144, loss = 0.62007436\n",
      "Iteration 145, loss = 0.60315579\n",
      "Iteration 146, loss = 0.60657892\n",
      "Iteration 147, loss = 0.59743977\n",
      "Iteration 148, loss = 0.59616631\n",
      "Iteration 149, loss = 0.59160334\n",
      "Iteration 150, loss = 0.59995973\n",
      "Iteration 151, loss = 0.59650711\n",
      "Iteration 152, loss = 0.59000079\n",
      "Iteration 153, loss = 0.58873696\n",
      "Iteration 154, loss = 0.59228478\n",
      "Iteration 155, loss = 0.59356100\n",
      "Iteration 156, loss = 0.62555623\n",
      "Iteration 157, loss = 0.59116328\n",
      "Iteration 158, loss = 0.58035756\n",
      "Iteration 159, loss = 0.59408400\n",
      "Iteration 160, loss = 0.58966911\n",
      "Iteration 161, loss = 0.58521112\n",
      "Iteration 162, loss = 0.58386803\n",
      "Iteration 163, loss = 0.57269960\n",
      "Iteration 164, loss = 0.57530530\n",
      "Iteration 165, loss = 0.58155144\n",
      "Iteration 166, loss = 0.57877742\n",
      "Iteration 167, loss = 0.57015161\n",
      "Iteration 168, loss = 0.56798198\n",
      "Iteration 169, loss = 0.58098799\n",
      "Iteration 170, loss = 0.56314558\n",
      "Iteration 171, loss = 0.56912114\n",
      "Iteration 172, loss = 0.55975314\n",
      "Iteration 173, loss = 0.56852756\n",
      "Iteration 174, loss = 0.55915316\n",
      "Iteration 175, loss = 0.56189892\n",
      "Iteration 176, loss = 0.55674093\n",
      "Iteration 177, loss = 0.57204749\n",
      "Iteration 178, loss = 0.56133837\n",
      "Iteration 179, loss = 0.55389948\n",
      "Iteration 180, loss = 0.55706260\n",
      "Iteration 181, loss = 0.55326434\n",
      "Iteration 182, loss = 0.55223865\n",
      "Iteration 183, loss = 0.55334009\n",
      "Iteration 184, loss = 0.55244593\n",
      "Iteration 185, loss = 0.55396916\n",
      "Iteration 186, loss = 0.54641286\n",
      "Iteration 187, loss = 0.55436140\n",
      "Iteration 188, loss = 0.54540696\n",
      "Iteration 189, loss = 0.54151829\n",
      "Iteration 190, loss = 0.54888217\n",
      "Iteration 191, loss = 0.54355123\n",
      "Iteration 192, loss = 0.53875997\n",
      "Iteration 193, loss = 0.53805194\n",
      "Iteration 194, loss = 0.54298359\n",
      "Iteration 195, loss = 0.54442176\n",
      "Iteration 196, loss = 0.54191151\n",
      "Iteration 197, loss = 0.53684101\n",
      "Iteration 198, loss = 0.52798132\n",
      "Iteration 199, loss = 0.53611104\n",
      "Iteration 200, loss = 0.52943596\n",
      "Iteration 201, loss = 0.52976987\n",
      "Iteration 202, loss = 0.52988515\n",
      "Iteration 203, loss = 0.53218043\n",
      "Iteration 204, loss = 0.52699043\n",
      "Iteration 205, loss = 0.52599287\n",
      "Iteration 206, loss = 0.52423230\n",
      "Iteration 207, loss = 0.52075924\n",
      "Iteration 208, loss = 0.52074683\n",
      "Iteration 209, loss = 0.52193528\n",
      "Iteration 210, loss = 0.52741883\n",
      "Iteration 211, loss = 0.52540637\n",
      "Iteration 212, loss = 0.51972265\n",
      "Iteration 213, loss = 0.52357278\n",
      "Iteration 214, loss = 0.51432340\n",
      "Iteration 215, loss = 0.51789352\n",
      "Iteration 216, loss = 0.52094687\n",
      "Iteration 217, loss = 0.51672008\n",
      "Iteration 218, loss = 0.51219299\n",
      "Iteration 219, loss = 0.51274681\n",
      "Iteration 220, loss = 0.51020404\n",
      "Iteration 221, loss = 0.50873097\n",
      "Iteration 222, loss = 0.51559665\n",
      "Iteration 223, loss = 0.52071174\n",
      "Iteration 224, loss = 0.51378115\n",
      "Iteration 225, loss = 0.51433270\n",
      "Iteration 226, loss = 0.50720675\n",
      "Iteration 227, loss = 0.50289663\n",
      "Iteration 228, loss = 0.50132624\n",
      "Iteration 229, loss = 0.50551478\n",
      "Iteration 230, loss = 0.50788070\n",
      "Iteration 231, loss = 0.49876404\n",
      "Iteration 232, loss = 0.49781941\n",
      "Iteration 233, loss = 0.51207711\n",
      "Iteration 234, loss = 0.50752755\n",
      "Iteration 235, loss = 0.49922546\n",
      "Iteration 236, loss = 0.50324646\n",
      "Iteration 237, loss = 0.50588139\n",
      "Iteration 238, loss = 0.50220800\n",
      "Iteration 239, loss = 0.49715276\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iteration 240, loss = 0.50092564\n",
      "Iteration 241, loss = 0.49978591\n",
      "Iteration 242, loss = 0.50053733\n",
      "Iteration 243, loss = 0.49414385\n",
      "Iteration 244, loss = 0.49643979\n",
      "Iteration 245, loss = 0.49433416\n",
      "Iteration 246, loss = 0.49441851\n",
      "Iteration 247, loss = 0.49197589\n",
      "Iteration 248, loss = 0.49248049\n",
      "Iteration 249, loss = 0.49222521\n",
      "Iteration 250, loss = 0.48546507\n",
      "Iteration 251, loss = 0.48855401\n",
      "Iteration 252, loss = 0.49073796\n",
      "Iteration 253, loss = 0.49470968\n",
      "Iteration 254, loss = 0.50528921\n",
      "Iteration 255, loss = 0.48551017\n",
      "Iteration 256, loss = 0.49060099\n",
      "Iteration 257, loss = 0.48608229\n",
      "Iteration 258, loss = 0.49201806\n",
      "Iteration 259, loss = 0.49024544\n",
      "Iteration 260, loss = 0.48375696\n",
      "Iteration 261, loss = 0.47887307\n",
      "Iteration 262, loss = 0.48329625\n",
      "Iteration 263, loss = 0.48764925\n",
      "Iteration 264, loss = 0.47680961\n",
      "Iteration 265, loss = 0.47847623\n",
      "Iteration 266, loss = 0.47404511\n",
      "Iteration 267, loss = 0.47619225\n",
      "Iteration 268, loss = 0.47611475\n",
      "Iteration 269, loss = 0.47899025\n",
      "Iteration 270, loss = 0.48154952\n",
      "Iteration 271, loss = 0.47113854\n",
      "Iteration 272, loss = 0.48503177\n",
      "Iteration 273, loss = 0.47863808\n",
      "Iteration 274, loss = 0.47370617\n",
      "Iteration 275, loss = 0.46708923\n",
      "Iteration 276, loss = 0.47756322\n",
      "Iteration 277, loss = 0.47370357\n",
      "Iteration 278, loss = 0.47383658\n",
      "Iteration 279, loss = 0.47290496\n",
      "Iteration 280, loss = 0.47563736\n",
      "Iteration 281, loss = 0.47267517\n",
      "Iteration 282, loss = 0.47174644\n",
      "Iteration 283, loss = 0.46927076\n",
      "Iteration 284, loss = 0.46626131\n",
      "Iteration 285, loss = 0.46521524\n",
      "Iteration 286, loss = 0.46509461\n",
      "Iteration 287, loss = 0.46220297\n",
      "Iteration 288, loss = 0.47377981\n",
      "Iteration 289, loss = 0.49435171\n",
      "Iteration 290, loss = 0.46699173\n",
      "Iteration 291, loss = 0.46359606\n",
      "Iteration 292, loss = 0.46337775\n",
      "Iteration 293, loss = 0.46383786\n",
      "Iteration 294, loss = 0.45755808\n",
      "Iteration 295, loss = 0.46119564\n",
      "Iteration 296, loss = 0.46081803\n",
      "Iteration 297, loss = 0.45904635\n",
      "Iteration 298, loss = 0.46382766\n",
      "Iteration 299, loss = 0.46699433\n",
      "Iteration 300, loss = 0.45927007\n",
      "Iteration 301, loss = 0.46381106\n",
      "Iteration 302, loss = 0.46749151\n",
      "Iteration 303, loss = 0.45499032\n",
      "Iteration 304, loss = 0.45841982\n",
      "Iteration 305, loss = 0.46160317\n",
      "Iteration 306, loss = 0.45530589\n",
      "Iteration 307, loss = 0.45285805\n",
      "Iteration 308, loss = 0.45193572\n",
      "Iteration 309, loss = 0.45555740\n",
      "Iteration 310, loss = 0.45620333\n",
      "Iteration 311, loss = 0.45559057\n",
      "Iteration 312, loss = 0.44652547\n",
      "Iteration 313, loss = 0.44573547\n",
      "Iteration 314, loss = 0.45434048\n",
      "Iteration 315, loss = 0.44921434\n",
      "Iteration 316, loss = 0.44796155\n",
      "Iteration 317, loss = 0.44981530\n",
      "Iteration 318, loss = 0.45333009\n",
      "Iteration 319, loss = 0.44831911\n",
      "Iteration 320, loss = 0.44460423\n",
      "Iteration 321, loss = 0.44730189\n",
      "Iteration 322, loss = 0.44882660\n",
      "Iteration 323, loss = 0.44804190\n",
      "Iteration 324, loss = 0.44541200\n",
      "Iteration 325, loss = 0.44609182\n",
      "Iteration 326, loss = 0.45021094\n",
      "Iteration 327, loss = 0.44276468\n",
      "Iteration 328, loss = 0.44016002\n",
      "Iteration 329, loss = 0.45998360\n",
      "Iteration 330, loss = 0.44278932\n",
      "Iteration 331, loss = 0.44535216\n",
      "Iteration 332, loss = 0.44811857\n",
      "Iteration 333, loss = 0.44651308\n",
      "Iteration 334, loss = 0.44302694\n",
      "Iteration 335, loss = 0.43799672\n",
      "Iteration 336, loss = 0.44748854\n",
      "Iteration 337, loss = 0.44173826\n",
      "Iteration 338, loss = 0.43497485\n",
      "Iteration 339, loss = 0.43742194\n",
      "Iteration 340, loss = 0.44043380\n",
      "Iteration 341, loss = 0.43762079\n",
      "Iteration 342, loss = 0.44511093\n",
      "Iteration 343, loss = 0.43539745\n",
      "Iteration 344, loss = 0.43863783\n",
      "Iteration 345, loss = 0.43743094\n",
      "Iteration 346, loss = 0.44265485\n",
      "Iteration 347, loss = 0.43750643\n",
      "Iteration 348, loss = 0.44032902\n",
      "Iteration 349, loss = 0.44604562\n",
      "Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.\n",
      "2.7730573235245157\n",
      "[10.94360324  7.71594956 14.14533411 ...  5.68396704 13.77028072\n",
      " 13.46956823]\n",
      "[[0, 56590], [1, 2243], [2, 1390722], [3, 1443501], [4, 520979], [5, 809], [6, 21], [7, 645870], [8, 512583], [9, 273923], [10, 18], [11, 2195860], [12, 635017], [13, 6976126], [14, 452751], [15, 680794], [16, 15326], [17, 888343], [18, 851151], [19, 11126], [20, 14512], [21, 3888870], [22, 603251724], [23, 49], [24, 123943], [25, 3301663], [26, 51678], [27, 66], [28, 117], [29, 13821], [30, 158456], [31, 30352], [32, 22130590], [33, 3334423], [34, 3], [35, 368], [36, 2992475], [37, 2572984], [38, 4159874], [39, 1624387], [40, 2703835], [41, 759769], [42, 2385905], [43, 964], [44, 128430], [45, 21], [46, 39], [47, 15], [48, 891], [49, 1570387], [50, 514381], [51, 10425794], [52, 6940603], [53, 166197], [54, 12522731], [55, 199794], [56, 3975386], [57, 684879], [58, 122293], [59, 296506], [60, 70216], [61, 162281], [62, 1187959], [63, 7978], [64, 197], [65, 16], [66, 151006], [67, 295222], [68, 85304], [69, 2470485], [70, 3985386], [71, 241851], [72, 8], [73, 54234], [74, 23781044], [75, 105], [76, 5880870], [77, 3117952], [78, 38121985], [79, 10475845], [80, 16], [81, 669], [82, 16495358], [83, 366648], [84, 43154], [85, 33950], [86, 575309], [87, 3086003], [88, 277], [89, 1447535], [90, 3742580], [91, 3305415], [92, 219536], [93, 147783], [94, 39627], [95, 1506323], [96, 2899345], [97, 247], [98, 12370198], [99, 4], [100, 26058], [101, 749126], [102, 310], [103, 2164], [104, 944033], [105, 54], [106, 60536988], [107, 208137], [108, 22498975], [109, 664161], [110, 35], [111, 6], [112, 883581], [113, 139622], [114, 716485], [115, 3635419], [116, 34], [117, 15267], [118, 13788], [119, 126324], [120, 285], [121, 4008688], [122, 809271], [123, 27], [124, 966253], [125, 336525], [126, 27527221], [127, 1567220], [128, 546373], [129, 46570], [130, 29198], [131, 90907], [132, 51981], [133, 511943], [134, 305], [135, 104112], [136, 34], [137, 213846], [138, 2461224], [139, 210400], [140, 205530], [141, 1202263], [142, 0], [143, 31746], [144, 1205], [145, 338767], 
[146, 1528997], [147, 522538], [148, 1334], [149, 15336], [150, 3357688], [151, 347849], [152, 314019795], [153, 31091], [154, 575965], [155, 9857055], [156, 76599492], [157, 698650], [158, 171021], [159, 51], [160, 2404613], [161, 977853], [162, 4671], [163, 894522], [164, 1639322], [165, 1024816], [166, 11922543], [167, 20570461], [168, 2071663], [169, 68], [170, 435489], [171, 535372], [172, 255037], [173, 235520], [174, 726880], [175, 624], [176, 396], [177, 14302437], [178, 102299], [179, 1132589], [180, 88752121], [181, 1965207], [182, 1710], [183, 60458], [184, 38745815], [185, 1874950], [186, 237886], [187, 360066], [188, 25643], [189, 9774], [190, 15258120], [191, 921824], [192, 2411750], [193, 3], [194, 22], [195, 157807], [196, 28054877], [197, 286864], [198, 197], [199, 124817], [200, 1], [201, 9088880], [202, 3217975], [203, 24856], [204, 66485452], [205, 24260107], [206, 37], [207, 808], [208, 12073578], [209, 9869352], [210, 152], [211, 4669455], [212, 85], [213, 47621], [214, 17991], [215, 12], [216, 334], [217, 1942333], [218, 1140767], [219, 469474], [220, 84], [221, 3], [222, 3052], [223, 1231286], [224, 100757], [225, 9387532], [226, 56077], [227, 1186], [228, 56], [229, 14718450], [230, 1373405], [231, 2661733], [232, 406], [233, 96938], [234, 2440156], [235, 781678], [236, 331182], [237, 799064], [238, 20974], [239, 4106536], [240, 8140], [241, 9145061], [242, 22], [243, 6495], [244, 846340], [245, 841568], [246, 128405], [247, 2024687], [248, 1806955], [249, 137806], [250, 20923], [251, 106592], [252, 3310], [253, 918640], [254, 4707359], [255, 6868617], [256, 55], [257, 36531640], [258, 21549450], [259, 112462], [260, 180103], [261, 888877], [262, 229876], [263, 1287], [264, 1311417], [265, 330199], [266, 72536695], [267, 9029977], [268, 108183], [269, 4], [270, 222924], [271, 1260074], [272, 703462], [273, 16384215], [274, 414269], [275, 43994747], [276, 2237642], [277, 284269], [278, 69], [279, 12443048], [280, 55], [281, 352140], [282, 
13], [283, 154], [284, 963780], [285, 52850], [286, 14389], [287, 169224], [288, 220645], [289, 5983912], [290, 518214], [291, 7], [292, 205967453], [293, 1317858], [294, 12841], [295, 145], [296, 599124], [297, 1248826], [298, 2039161], [299, 805], [300, 19101594], [301, 73], [302, 121011], [303, 304717], [304, 96094], [305, 127182], [306, 577], [307, 1940767], [308, 4476346], [309, 183586], [310, 1985122], [311, 330], [312, 7], [313, 365413], [314, 257893], [315, 21787741], [316, 234], [317, 178441], [318, 235], [319, 188], [320, 3], [321, 129083], [322, 11749164], [323, 247749], [324, 727], [325, 1545432], [326, 409756], [327, 55], [328, 45], [329, 4], [330, 717872], [331, 752784], [332, 9902017], [333, 962086], [334, 712222], [335, 43], [336, 35546], [337, 133200], [338, 4589762], [339, 6], [340, 1284983], [341, 75785], [342, 96938504], [343, 118], [344, 6], [345, 63], [346, 707355], [347, 7013], [348, 222690], [349, 1393], [350, 129025], [351, 3247934], [352, 198150], [353, 481231], [354, 540732], [355, 540087], [356, 4588], [357, 22470762], [358, 6036789], [359, 3016284], [360, 66599], [361, 1675333], [362, 781703], [363, 25063730], [364, 2099439], [365, 1236433], [366, 303152], [367, 106514300], [368, 2024979], [369, 36088826], [370, 166842], [371, 32], [372, 6483], [373, 154335], [374, 63144], [375, 2392002], [376, 6984], [377, 1676197], [378, 24907937], [379, 1], [380, 439], [381, 106], [382, 548998], [383, 4654481], [384, 3080387], [385, 19], [386, 55028592], [387, 1176784], [388, 1380259], [389, 10275], [390, 2635496], [391, 6312119], [392, 5056578], [393, 420], [394, 24924550], [395, 22], [396, 200], [397, 18359], [398, 991623], [399, 512745813], [400, 8358424], [401, 1697], [402, 459055], [403, 39008], [404, 127], [405, 677920], [406, 43642036], [407, 69255], [408, 6223816], [409, 140752], [410, 965393], [411, 978], [412, 456710], [413, 198], [414, 1185645], [415, 248929], [416, 37881], [417, 5341135], [418, 19826951], [419, 502741], [420, 4737259], 
[421, 200535], [422, 23577], [423, 7], [424, 194673], [425, 1052087], [426, 51637], [427, 1], [428, 75856], [429, 1140], [430, 24], [431, 1909323], [432, 7], [433, 30917], [434, 181000], [435, 114198], [436, 7217053], [437, 68234], [438, 1138770], [439, 2875417], [440, 92], [441, 107288], [442, 69], [443, 104], [444, 4106790], [445, 2259129], [446, 34432], [447, 353776], [448, 1084950], [449, 448345], [450, 15], [451, 1654], [452, 5361031], [453, 13048014], [454, 2000], [455, 4662219], [456, 6596940], [457, 1717480], [458, 5740], [459, 48545], [460, 239743], [461, 1623536], [462, 121767], [463, 1153141], [464, 222619], [465, 11353], [466, 2308251], [467, 62], [468, 18], [469, 19], [470, 623322], [471, 133923], [472, 39978], [473, 4], [474, 3775162], [475, 185], [476, 3071961], [477, 908231], [478, 1410594], [479, 2483671], [480, 207], [481, 3041678], [482, 195573], [483, 3022521], [484, 1905602], [485, 924735], [486, 1369], [487, 503985], [488, 21], [489, 635466], [490, 10362], [491, 28], [492, 237499], [493, 10], [494, 50], [495, 558764], [496, 346047], [497, 33238], [498, 38], [499, 974], [500, 14545502], [501, 2829], [502, 310], [503, 64], [504, 28459670], [505, 10701], [506, 24257], [507, 135141], [508, 107], [509, 418987], [510, 34], [511, 42], [512, 760881], [513, 2205265], [514, 2525573], [515, 1505927], [516, 253514], [517, 329491], [518, 9542848], [519, 19144], [520, 19], [521, 870], [522, 2727], [523, 7], [524, 1337390], [525, 26405], [526, 67], [527, 5852], [528, 190], [529, 53367], [530, 26810], [531, 13610257], [532, 16891069], [533, 562393], [534, 104611], [535, 135778296], [536, 7], [537, 175251], [538, 28598647], [539, 2730121], [540, 74], [541, 91660738], [542, 54546469], [543, 1038031], [544, 4], [545, 432], [546, 2786821], [547, 249259], [548, 1247583], [549, 619], [550, 86], [551, 18978], [552, 6475059], [553, 252554], [554, 0], [555, 3625504], [556, 68517], [557, 6919003], [558, 175079], [559, 352286], [560, 140], [561, 45769], [562, 203290], 
[563, 4241566], [564, 4007330], [565, 23113], [566, 6524132], [567, 24177324], [568, 1], [569, 3871022], [570, 6105046], [571, 2400486], [572, 66520], [573, 345445], [574, 56623], [575, 224701], [576, 3908756], [577, 909223], [578, 328945], [579, 1289], [580, 53018], [581, 886675], [582, 1032516], [583, 1131737], [584, 12994], [585, 3509687], [586, 8154422], [587, 1407164], [588, 3239246], [589, 49837], [590, 2080611], [591, 243089], [592, 10401], [593, 2045114], [594, 3], [595, 38890], [596, 444002], [597, 828199], [598, 321286], [599, 405], [600, 116], [601, 181097], [602, 795111], [603, 2602871], [604, 315164], [605, 1131836], [606, 24178360], [607, 4094561], [608, 5045058], [609, 158056], [610, 14606], [611, 1204304], [612, 36501], [613, 268], [614, 1193], [615, 26185742], [616, 32095], [617, 15], [618, 19315], [619, 2249355], [620, 11101], [621, 22022], [622, 82108], [623, 41], [624, 82785], [625, 755780], [626, 35924887], [627, 961142], [628, 1611643], [629, 3218361], [630, 970272], [631, 49], [632, 6493894], [633, 13], [634, 287], [635, 16328351], [636, 8301251], [637, 107269], [638, 52459], [639, 932746], [640, 72257], [641, 2770847], [642, 201], [643, 254989], [644, 10328050], [645, 858006], [646, 768079], [647, 376143], [648, 2170888], [649, 2], [650, 21431], [651, 11], [652, 3603205], [653, 19186], [654, 27503], [655, 33228], [656, 178], [657, 1871128], [658, 1264], [659, 14], [660, 543583], [661, 922994], [662, 312], [663, 734396], [664, 3853933], [665, 560565], [666, 788827], [667, 42490], [668, 303], [669, 10677], [670, 132892], [671, 120898], [672, 558], [673, 2161964], [674, 55], [675, 5587082], [676, 986934], [677, 8440], [678, 3959957], [679, 643687], [680, 14], [681, 5], [682, 2592997], [683, 120], [684, 50261072], [685, 6502331], [686, 83113], [687, 47], [688, 962737], [689, 147], [690, 123078], [691, 311207], [692, 17828228], [693, 1086978], [694, 40], [695, 40789], [696, 19292137], [697, 10205612], [698, 1167871], [699, 41752210], [700, 
4075138], [701, 6129480], [702, 331251], [703, 22], [704, 2953287], [705, 70], [706, 5346715], [707, 756267], [708, 529672], [709, 191582], [710, 1650696], [711, 510219], [712, 212365], [713, 541905], [714, 224], [715, 3802], [716, 37331454], [717, 31266], [718, 842], [719, 17122727], [720, 942084], [721, 1442271], [722, 14398066], [723, 85], [724, 95580190], [725, 25089896], [726, 18160224], [727, 14878522], [728, 57589806], [729, 707898], [730, 187641], [731, 714122], [732, 105], [733, 2024146], [734, 16546], [735, 98927], [736, 276061], [737, 947803], [738, 8428], [739, 9998], [740, 1623367], [741, 825149], [742, 6680713], [743, 26927623], [744, 16769131], [745, 6106316], [746, 44241], [747, 1113], [748, 2498094], [749, 11295], [750, 338951], [751, 66], [752, 630722], [753, 815], [754, 8260236], [755, 321], [756, 537], [757, 2835641], [758, 2821333], [759, 304], [760, 288976], [761, 1974975], [762, 438382], [763, 1637945], [764, 17], [765, 212569], [766, 8953], [767, 118], [768, 56933], [769, 6208], [770, 217595], [771, 8083909], [772, 15114161], [773, 55], [774, 24682004], [775, 43], [776, 23979], [777, 3806879], [778, 6921656], [779, 395], [780, 982353], [781, 142], [782, 291121], [783, 9735814], [784, 59879964], [785, 1453250], [786, 16248], [787, 3899284], [788, 595057], [789, 855999], [790, 105767276], [791, 79], [792, 2749127], [793, 11350], [794, 118121], [795, 375797], [796, 64235], [797, 130], [798, 212058], [799, 96863], [800, 117522], [801, 1723836], [802, 1428], [803, 16880], [804, 242976], [805, 1053696], [806, 449222], [807, 4329373], [808, 285], [809, 595329], [810, 683758], [811, 191764], [812, 8299675], [813, 81886576], [814, 147317], [815, 4068395], [816, 36], [817, 31827], [818, 764553], [819, 1361], [820, 103459], [821, 1844069], [822, 204039], [823, 14729399], [824, 2], [825, 4781884], [826, 33542356], [827, 3794046], [828, 4150913], [829, 41530], [830, 2572], [831, 10542398], [832, 1557364], [833, 1808852], [834, 248779885], [835, 78], 
[836, 2210751], [837, 13847182], [838, 14299458], [839, 32167], [840, 85992], [841, 1628402], [842, 40474], [843, 374], [844, 132662], [845, 257699], [846, 64], [847, 656823], [848, 7708067], [849, 3969824], [850, 757322], [851, 4], [852, 4971061], [853, 104930], [854, 1362436], [855, 299519], [856, 8773191], [857, 2139], [858, 3982941], [859, 86276], [860, 20412007], [861, 0], [862, 4], [863, 104373876], [864, 60988474], [865, 1994], [866, 246566], [867, 52285], [868, 6394662], [869, 9225756], [870, 23459080], [871, 35248], [872, 2869636], [873, 5814729], [874, 1288813], [875, 9456036], [876, 534369], [877, 74874930], [878, 30294], [879, 163340], [880, 10395110], [881, 5130], [882, 5341], [883, 67090], [884, 901590], [885, 953091], [886, 33130885], [887, 2458713], [888, 2136750], [889, 195106], [890, 28], [891, 112173], [892, 965325], [893, 235630], [894, 32], [895, 11700840], [896, 73116], [897, 2539193], [898, 1467923], [899, 92], [900, 2588861], [901, 12], [902, 98683], [903, 404], [904, 78977], [905, 2183], [906, 272626], [907, 1012924], [908, 1015736], [909, 203665], [910, 98], [911, 27542867], [912, 1022], [913, 66789795], [914, 408531], [915, 463], [916, 118317], [917, 63], [918, 1378685], [919, 680702], [920, 6628], [921, 356900], [922, 1357867], [923, 14214335], [924, 78767], [925, 1686045], [926, 133707], [927, 12935], [928, 710], [929, 779441], [930, 982235], [931, 963744], [932, 68], [933, 1636392], [934, 2366], [935, 521669675], [936, 75532], [937, 2], [938, 781914], [939, 9667620], [940, 1816164], [941, 786], [942, 456], [943, 138557], [944, 131], [945, 250], [946, 81], [947, 27102155], [948, 549569], [949, 365289], [950, 11514375], [951, 8], [952, 181489], [953, 393556], [954, 81109], [955, 97], [956, 25454], [957, 479], [958, 542674], [959, 481], [960, 2767667], [961, 17824], [962, 679269], [963, 882], [964, 2316300], [965, 133], [966, 1823478], [967, 1452288], [968, 4084163], [969, 135], [970, 9413535], [971, 6750], [972, 804006], [973, 5537368], 
[974, 20590277], [975, 94], [976, 964], [977, 975479], [978, 6938], [979, 246], [980, 785401], [981, 3846044], [982, 10521027], [983, 1295829], [984, 1936904], [985, 16403], [986, 9184], [987, 23], [988, 73881], [989, 3290429], [990, 1114], [991, 3084855], [992, 2523623], [993, 406308], [994, 3163061], [995, 150165], [996, 180530], [997, 23], [998, 1444806], [999, 10718], [1000, 791517], [1001, 2501187], [1002, 310672], [1003, 1243], [1004, 235634], [1005, 5896838], [1006, 3089458], [1007, 112], [1008, 128673], [1009, 767749], [1010, 728], [1011, 204], [1012, 44927], [1013, 4448769], [1014, 10589], [1015, 206], [1016, 67400], [1017, 389041], [1018, 18032], [1019, 815345], [1020, 4696466], [1021, 292], [1022, 9838239], [1023, 61448], [1024, 24109], [1025, 1472474], [1026, 10], [1027, 1218075], [1028, 323], [1029, 956828], [1030, 22898], [1031, 1527040], [1032, 1284], [1033, 7114291], [1034, 16783], [1035, 926894], [1036, 405294], [1037, 157726], [1038, 3219622], [1039, 12], [1040, 10661435], [1041, 384022], [1042, 1173324], [1043, 549437], [1044, 355292], [1045, 23060279], [1046, 4173039], [1047, 550281], [1048, 1898], [1049, 444648], [1050, 85897], [1051, 66], [1052, 268861], [1053, 49539554], [1054, 11007857], [1055, 323514], [1056, 1068691], [1057, 3039178], [1058, 521], [1059, 2972857], [1060, 369680], [1061, 76002], [1062, 8553553], [1063, 2545391], [1064, 205], [1065, 491158], [1066, 81331], [1067, 86435], [1068, 281912], [1069, 53469], [1070, 1477319], [1071, 2759], [1072, 1373700], [1073, 12763], [1074, 154], [1075, 1714896], [1076, 559099], [1077, 19783], [1078, 3648894], [1079, 246594], [1080, 35445857], [1081, 8670733], [1082, 1588344], [1083, 151181], [1084, 797026], [1085, 55], [1086, 3937755], [1087, 73], [1088, 250283], [1089, 9], [1090, 1389611], [1091, 2398311], [1092, 17], [1093, 437231], [1094, 26647462], [1095, 27246], [1096, 38642], [1097, 48421], [1098, 21982], [1099, 270636], [1100, 338276], [1101, 849723], [1102, 432729], [1103, 5996978], 
[1104, 3439675], [1105, 63527], [1106, 5080], [1107, 6576698], [1108, 598], [1109, 3108], [1110, 81], [1111, 1063823], [1112, 91872], [1113, 930917], [1114, 305512], [1115, 697248], [1116, 37621695], [1117, 4153593], [1118, 202034], [1119, 343380], [1120, 3268], [1121, 4542754], [1122, 321390], [1123, 43], [1124, 604367], [1125, 1406422], [1126, 1127916], [1127, 1081539], [1128, 100], [1129, 1064535], [1130, 3782705], [1131, 722684], [1132, 290917], [1133, 20352732], [1134, 85551632], [1135, 4231230], [1136, 25], [1137, 7], [1138, 102507], [1139, 3189414], [1140, 962], [1141, 728555], [1142, 652371], [1143, 455], [1144, 1145253], [1145, 1093077], [1146, 842407], [1147, 424681], [1148, 10706], [1149, 780247], [1150, 30826890], [1151, 257931], [1152, 714734], [1153, 60822069], [1154, 83012], [1155, 500626], [1156, 52162207], [1157, 121107], [1158, 168211], [1159, 1593421], [1160, 23791625], [1161, 9996758], [1162, 7002329], [1163, 1580058], [1164, 1583], [1165, 93], [1166, 4121], [1167, 3833879], [1168, 7178727], [1169, 2157], [1170, 3], [1171, 243], [1172, 48], [1173, 232321], [1174, 27593], [1175, 73953596], [1176, 281568], [1177, 17660], [1178, 786720], [1179, 836], [1180, 13546803], [1181, 2127243], [1182, 9336031], [1183, 1350428], [1184, 31], [1185, 1151014], [1186, 3418512], [1187, 9882796], [1188, 48036], [1189, 283931], [1190, 266079], [1191, 18391649], [1192, 15307362], [1193, 28724], [1194, 671], [1195, 3516813], [1196, 6845197], [1197, 25203690], [1198, 1619515], [1199, 34217], [1200, 22], [1201, 1], [1202, 977], [1203, 3094503], [1204, 14505], [1205, 1357398], [1206, 5262462], [1207, 1336], [1208, 24], [1209, 4931522], [1210, 642798], [1211, 841545], [1212, 2109], [1213, 968623], [1214, 2459296], [1215, 1506527], [1216, 3136416], [1217, 468934], [1218, 406183], [1219, 332395], [1220, 995], [1221, 45], [1222, 194], [1223, 748482], [1224, 7758988], [1225, 121337], [1226, 339], [1227, 14007939], [1228, 52842], [1229, 4185158], [1230, 415], [1231, 102726], 
[1232, 845901], [1233, 3439557], [1234, 18532171], [1235, 5038], [1236, 2852554], [1237, 11971735], [1238, 56], [1239, 2715359], [1240, 1238439], [1241, 233799], [1242, 1505], [1243, 1248], [1244, 397846], [1245, 87], [1246, 344576], [1247, 2036], [1248, 128245], [1249, 2786638], [1250, 273], [1251, 1290117], [1252, 536340], [1253, 5429677], [1254, 25297], [1255, 1800842], [1256, 969], [1257, 1282267], [1258, 3], [1259, 34], [1260, 2023273], [1261, 917784], [1262, 8405], [1263, 13624], [1264, 43924], [1265, 3391], [1266, 2], [1267, 175], [1268, 4722385], [1269, 42], [1270, 521383], [1271, 6139502], [1272, 1584244], [1273, 789176], [1274, 1966577], [1275, 1192], [1276, 188168], [1277, 574945], [1278, 1598035], [1279, 3275521], [1280, 403080], [1281, 5121297], [1282, 630917], [1283, 222085], [1284, 502274], [1285, 1015400], [1286, 38421908], [1287, 251301], [1288, 2154452], [1289, 22644223], [1290, 655035], [1291, 10454982], [1292, 11672], [1293, 268795], [1294, 23019], [1295, 2171707], [1296, 45166], [1297, 1171764], [1298, 462377], [1299, 267583], [1300, 17372], [1301, 163500], [1302, 21863333], [1303, 4155283], [1304, 1122055], [1305, 707350], [1306, 352157], [1307, 16300859], [1308, 8], [1309, 3966034], [1310, 997320], [1311, 141165], [1312, 1038626], [1313, 164269], [1314, 558153], [1315, 140654156], [1316, 265], [1317, 313], [1318, 123], [1319, 15373732], [1320, 2009], [1321, 18886], [1322, 6039002], [1323, 803486], [1324, 31928], [1325, 10278934], [1326, 756177], [1327, 819623], [1328, 237499], [1329, 26643], [1330, 15775], [1331, 66605], [1332, 764991], [1333, 33773187], [1334, 684699603], [1335, 130], [1336, 3660235], [1337, 6317890], [1338, 9144], [1339, 50992], [1340, 15139797], [1341, 10], [1342, 24350375], [1343, 6884376], [1344, 3611], [1345, 16], [1346, 17485], [1347, 56347342], [1348, 71], [1349, 39013], [1350, 56], [1351, 2928190], [1352, 739719], [1353, 292568], [1354, 5866995], [1355, 724621], [1356, 942704], [1357, 2075], [1358, 1219086], [1359, 
23415112], [1360, 7], [1361, 3514], [1362, 201], [1363, 135830], [1364, 3218], [1365, 766404], [1366, 4935], [1367, 739101], [1368, 945035], [1369, 1999349], [1370, 3088945], [1371, 14545], [1372, 423], [1373, 25095175], [1374, 140042], [1375, 12833], [1376, 949531], [1377, 8903], [1378, 10386586], [1379, 728793], [1380, 752440], [1381, 894], [1382, 17173264], [1383, 683803], [1384, 473705], [1385, 99286], [1386, 286], [1387, 14308760], [1388, 468898], [1389, 22222411], [1390, 2175541], [1391, 106565], [1392, 1024], [1393, 3353350], [1394, 130188], [1395, 83], [1396, 93342], [1397, 374473], [1398, 68806], [1399, 132], [1400, 154801], [1401, 1679], [1402, 32042], [1403, 24336], [1404, 2343128], [1405, 178189978], [1406, 444395], [1407, 206222], [1408, 2746178], [1409, 58613], [1410, 3912033], [1411, 1182131], [1412, 294416], [1413, 4148907], [1414, 37800], [1415, 1484634], [1416, 7897947], [1417, 166974], [1418, 121525], [1419, 29888660], [1420, 227271], [1421, 78], [1422, 3496997], [1423, 27093], [1424, 1089581], [1425, 1173279], [1426, 45873], [1427, 58506], [1428, 737], [1429, 11910], [1430, 610690], [1431, 12796], [1432, 847540], [1433, 65], [1434, 1921], [1435, 9194], [1436, 661960], [1437, 9], [1438, 303], [1439, 5994], [1440, 204992], [1441, 1077772], [1442, 27029590], [1443, 12657], [1444, 21719529], [1445, 47], [1446, 19765530], [1447, 903], [1448, 7853325], [1449, 605818], [1450, 242], [1451, 6], [1452, 3052], [1453, 143743], [1454, 5348105], [1455, 835472], [1456, 120786], [1457, 5054225], [1458, 2961], [1459, 3974184], [1460, 11074803], [1461, 936568], [1462, 65395], [1463, 11766937], [1464, 1826460], [1465, 2314909], [1466, 2008366], [1467, 225], [1468, 244], [1469, 399173], [1470, 4436465], [1471, 469835], [1472, 857847], [1473, 931], [1474, 270], [1475, 15078928], [1476, 14911434], [1477, 28474], [1478, 671609], [1479, 2106386], [1480, 344955], [1481, 447918], [1482, 596459], [1483, 1166685], [1484, 9539931], [1485, 280219], [1486, 384356], [1487, 
3048354], [1488, 2481670], [1489, 16], [1490, 7830], [1491, 1], [1492, 1883961], [1493, 3217], [1494, 31456], [1495, 259877], [1496, 114574], [1497, 1290106], [1498, 64], [1499, 610], [1500, 146317], [1501, 10], [1502, 24625], [1503, 8044], [1504, 31], [1505, 12493503], [1506, 593], [1507, 38338], [1508, 19944], [1509, 32882050], [1510, 17621594], [1511, 3882], [1512, 503], [1513, 25], [1514, 1160934], [1515, 68765], [1516, 12], [1517, 20160], [1518, 119249], [1519, 2214298], [1520, 73570], [1521, 78694], [1522, 4410244], [1523, 150], [1524, 1371532], [1525, 1936915], [1526, 1749723], [1527, 154557], [1528, 7366919], [1529, 975907], [1530, 2310646], [1531, 257033], [1532, 12], [1533, 4803313], [1534, 2], [1535, 2], [1536, 54195431], [1537, 103892], [1538, 16057], [1539, 495697], [1540, 170610], [1541, 1858257], [1542, 500223], [1543, 437871], [1544, 58943], [1545, 1403775], [1546, 15811], [1547, 43591], [1548, 7117], [1549, 316649], [1550, 467960003], [1551, 25070620], [1552, 10953069], [1553, 4], [1554, 366], [1555, 18823], [1556, 218], [1557, 12], [1558, 106858], [1559, 21437105], [1560, 1915], [1561, 6338568], [1562, 5970487], [1563, 843114], [1564, 13487690], [1565, 642097], [1566, 1467280], [1567, 17126233], [1568, 96], [1569, 8174216], [1570, 1363594], [1571, 926632], [1572, 14], [1573, 1294093], [1574, 73402], [1575, 70945], [1576, 19745], [1577, 18615323], [1578, 210850], [1579, 1099], [1580, 2996501], [1581, 2496165], [1582, 1269464], [1583, 3847618], [1584, 13908], [1585, 59383], [1586, 12744], [1587, 794077], [1588, 4104], [1589, 14390097], [1590, 11972397], [1591, 113], [1592, 11], [1593, 371], [1594, 142286], [1595, 1959], [1596, 6019619], [1597, 8748124], [1598, 31498], [1599, 5125], [1600, 804], [1601, 394482], [1602, 169], [1603, 305240], [1604, 8486330], [1605, 10555], [1606, 1186648], [1607, 1011266], [1608, 834603], [1609, 354993], [1610, 90], [1611, 20152], [1612, 706949], [1613, 15596917], [1614, 541845], [1615, 64737], [1616, 9404136], [1617, 
3140976], [1618, 108], [1619, 4602607], [1620, 4321], [1621, 3953793], [1622, 4171393], [1623, 783759], [1624, 48080], [1625, 2534617], [1626, 12702], [1627, 828], [1628, 45], [1629, 2410072], [1630, 83354], [1631, 86533360], [1632, 549], [1633, 62145], [1634, 17928654], [1635, 2], [1636, 30], [1637, 3012555], [1638, 1519776], [1639, 162612], [1640, 8], [1641, 496160], [1642, 3943229], [1643, 576745], [1644, 10130], [1645, 27], [1646, 1304], [1647, 81], [1648, 4762], [1649, 25], [1650, 132878], [1651, 2798158], [1652, 45528435], [1653, 64225], [1654, 8489], [1655, 3989839], [1656, 149191640], [1657, 10093844], [1658, 346292], [1659, 2], [1660, 109585], [1661, 423083], [1662, 454568], [1663, 118810], [1664, 335810], [1665, 2964644], [1666, 2101224], [1667, 12506], [1668, 153], [1669, 612], [1670, 302362], [1671, 10522484], [1672, 24136], [1673, 2520042], [1674, 37465615], [1675, 1989222], [1676, 17050915], [1677, 24446661], [1678, 0], [1679, 78], [1680, 54873], [1681, 3978799], [1682, 16054], [1683, 10259858], [1684, 16332749], [1685, 6162335], [1686, 856691], [1687, 1380019], [1688, 1149276], [1689, 39715256], [1690, 173890], [1691, 2131158], [1692, 454255], [1693, 102654], [1694, 22], [1695, 313], [1696, 27008], [1697, 230668372], [1698, 372150], [1699, 4067566], [1700, 108092568], [1701, 29084234], [1702, 237965], [1703, 116], [1704, 10539631], [1705, 49], [1706, 3110013], [1707, 1855387], [1708, 2], [1709, 294476563], [1710, 312586], [1711, 81258], [1712, 1072211], [1713, 79374], [1714, 201], [1715, 934261], [1716, 84912088], [1717, 19882227], [1718, 43], [1719, 43815], [1720, 325], [1721, 821], [1722, 52186], [1723, 204277], [1724, 291200], [1725, 671494], [1726, 64718], [1727, 4226718], [1728, 5051946], [1729, 211686], [1730, 10079335], [1731, 3839284], [1732, 1369], [1733, 8995502], [1734, 682309], [1735, 70042], [1736, 34217474], [1737, 6247], [1738, 33361424], [1739, 6], [1740, 381987], [1741, 492267], [1742, 98], [1743, 183968], [1744, 146348], [1745, 
702859], [1746, 3074739], [1747, 1643856], [1748, 741062], [1749, 23239], [1750, 1127660], [1751, 13137471], [1752, 60109], [1753, 794954], [1754, 114561], [1755, 4041497], [1756, 0], [1757, 95], [1758, 6313921], [1759, 22], [1760, 2768101], [1761, 1046935], [1762, 86], [1763, 715186], [1764, 23842602], [1765, 642866], [1766, 7788650], [1767, 37], [1768, 417204], [1769, 23530], [1770, 467], [1771, 54], [1772, 13637], [1773, 2090865], [1774, 8343641], [1775, 87053], [1776, 2032598], [1777, 20], [1778, 104], [1779, 24423648], [1780, 102223], [1781, 4166425], [1782, 1029], [1783, 2878144], [1784, 1034525], [1785, 13446], [1786, 7463911], [1787, 3914179], [1788, 3591109], [1789, 9514], [1790, 10036], [1791, 4823380], [1792, 369155], [1793, 8535690], [1794, 836], [1795, 3446764], [1796, 71], [1797, 3865582], [1798, 32723582], [1799, 3509130], [1800, 24864105], [1801, 668502], [1802, 25], [1803, 1484449], [1804, 2720763], [1805, 2], [1806, 49901], [1807, 20807955], [1808, 529603], [1809, 2078020], [1810, 543], [1811, 10], [1812, 1082874], [1813, 334234], [1814, 14098158], [1815, 9496300], [1816, 1107860], [1817, 674120], [1818, 7766252], [1819, 165915], [1820, 45269], [1821, 109744], [1822, 2936600], [1823, 13194], [1824, 79458804], [1825, 929259], [1826, 146002], [1827, 5154172], [1828, 16606], [1829, 65594], [1830, 901253], [1831, 82421], [1832, 41768775], [1833, 3000715], [1834, 59678], [1835, 12190792], [1836, 862804], [1837, 215080], [1838, 33118], [1839, 1861156], [1840, 5415], [1841, 1643634], [1842, 18586186], [1843, 263346], [1844, 70899], [1845, 73], [1846, 469663], [1847, 33], [1848, 1745477], [1849, 55], [1850, 38494], [1851, 1025513], [1852, 3817323], [1853, 1217], [1854, 27745835], [1855, 22998956], [1856, 109919], [1857, 117], [1858, 6433], [1859, 144262865], [1860, 34002], [1861, 547], [1862, 60736636], [1863, 214495], [1864, 353], [1865, 5315708], [1866, 587628], [1867, 4339227], [1868, 2170681], [1869, 135347], [1870, 78218], [1871, 1776378], [1872, 
123807703], [1873, 894900627], [1874, 135], [1875, 302], [1876, 14014], [1877, 7], [1878, 22435], [1879, 2854725], [1880, 1477491], [1881, 145156], [1882, 829469], [1883, 805], [1884, 8541], [1885, 133041], [1886, 2293], [1887, 273], [1888, 513726], [1889, 12245657], [1890, 4019780], [1891, 384720], [1892, 35347], [1893, 335], [1894, 8701524], [1895, 127268], [1896, 17354662], [1897, 2321927], [1898, 148864], [1899, 253069], [1900, 1092613], [1901, 2633465], [1902, 139655], [1903, 23], [1904, 1], [1905, 664707], [1906, 2564024], [1907, 12507], [1908, 47], [1909, 23936], [1910, 54], [1911, 22556427], [1912, 909709], [1913, 29131], [1914, 156269], [1915, 18], [1916, 1045674], [1917, 1068696], [1918, 184], [1919, 837866], [1920, 93156], [1921, 263655], [1922, 121], [1923, 36879], [1924, 709053], [1925, 160708], [1926, 7565039], [1927, 139014640], [1928, 17953], [1929, 55505533], [1930, 484354], [1931, 1185], [1932, 258247], [1933, 677286], [1934, 254087], [1935, 1317646], [1936, 512850], [1937, 55], [1938, 115], [1939, 165903], [1940, 30700], [1941, 38880], [1942, 47854], [1943, 1583], [1944, 1318391], [1945, 1255491], [1946, 862661], [1947, 144854], [1948, 652436], [1949, 1087038], [1950, 3146], [1951, 34497], [1952, 230318], [1953, 274060123], [1954, 9445], [1955, 114214], [1956, 39573], [1957, 441240], [1958, 1326492], [1959, 272], [1960, 12834881], [1961, 493352], [1962, 1196029], [1963, 2826412], [1964, 11176366], [1965, 3756037], [1966, 17826], [1967, 300740], [1968, 1874143], [1969, 82], [1970, 767237], [1971, 2063605], [1972, 473442], [1973, 5114553], [1974, 231], [1975, 46], [1976, 3], [1977, 282], [1978, 1355039], [1979, 7190987], [1980, 153], [1981, 877347], [1982, 245578], [1983, 130], [1984, 11024438], [1985, 30108], [1986, 2556909], [1987, 249223], [1988, 95], [1989, 13724874], [1990, 11185174], [1991, 62861], [1992, 29978], [1993, 196555], [1994, 334], [1995, 3264810], [1996, 635], [1997, 294], [1998, 955777], [1999, 707553]]\n"
     ]
    }
   ],
   "source": [
    "def qerror(X_test, y_test):\n",
    "    \"\"\"Mean squared error between predicted and true values.\n",
    "\n",
    "    NOTE(review): despite the name this is plain MSE, not the\n",
    "    max(pred/true, true/pred) q-error metric. The targets in this\n",
    "    notebook are log-cardinalities (they are exponentiated with\n",
    "    math.exp before export below), so this behaves like MSLE on\n",
    "    the raw counts.\n",
    "    \"\"\"\n",
    "    mse = 0.0\n",
    "    N = len(y_test)\n",
    "    for i in range(N):\n",
    "        mse += (X_test[i] - y_test[i]) ** 2\n",
    "    return mse / N\n",
    "\n",
    "# 10-fold cross-validation: train one MLP per fold and keep the best one.\n",
    "kf = KFold(n_splits=10)\n",
    "best_clf = None\n",
    "# Bug fix: this was 100, so if every fold scored >= 100 best_clf stayed\n",
    "# None and best_clf.predict() below raised AttributeError.\n",
    "best_score = float('inf')\n",
    "\n",
    "# The ndarray conversions are loop-invariant; hoist them out of the fold loop.\n",
    "X_arr = np.array(X_train)\n",
    "Y_arr = np.array(Y_train)\n",
    "\n",
    "for train_index, test_index in kf.split(X_arr):\n",
    "    # Fresh regressor per fold so folds do not share fitted state.\n",
    "    clf = MLPRegressor(verbose=True, solver='sgd', activation='relu', max_iter=5000, hidden_layer_sizes=(200, 200), random_state=1, batch_size=128)\n",
    "    x_train, x_test = X_arr[train_index], X_arr[test_index]\n",
    "    y_train, y_test = Y_arr[train_index], Y_arr[test_index]\n",
    "    clf.fit(x_train, y_train)\n",
    "\n",
    "    y_predict = clf.predict(x_test)\n",
    "    test_score = qerror(y_predict, y_test)\n",
    "    print(test_score)\n",
    "\n",
    "    # Keep the model with the lowest validation error across folds.\n",
    "    if test_score < best_score:\n",
    "        best_score = test_score\n",
    "        best_clf = clf\n",
    "\n",
    "Y_test = best_clf.predict(X_test)\n",
    "print(Y_test)\n",
    "\n",
    "# The model predicts log-cardinality; exponentiate back to a raw count.\n",
    "Writeline = []\n",
    "for query_id, pre_card in enumerate(Y_test):\n",
    "    Writeline.append([query_id, int(math.exp(pre_card))])\n",
    "\n",
    "print(Writeline)\n",
    "\n",
    "with open(\"xxx.csv\", \"w\", newline='') as csvfile:\n",
    "    csv_writer = csv.writer(csvfile)\n",
    "    csv_writer.writerow(['Query ID', 'Predicted Cardinality'])\n",
    "    for row in Writeline:\n",
    "        csv_writer.writerow(row)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "1 0.23533383371372454 56590 91923\n",
      "2 0.27677394555025764 2243 1325\n",
      "3 0.15304789264246363 1390722 2056564\n",
      "4 0.1831222840055486 1443501 2214440\n",
      "5 2.004426985648917 520979 2146275\n",
      "6 0.3198969724936196 809 1425\n",
      "7 2.9061660579905504 21 3\n",
      "8 0.2548420278028555 645870 1070004\n",
      "9 5.034882598855809 512583 54358\n",
      "10 7.39427858864009 273923 4155104\n",
      "11 1.6632423261508522 18 68\n",
      "12 1.0882195363432163 2195860 773674\n",
      "13 13.171915721614686 635017 23931879\n",
      "14 0.3700366474174095 6976126 12817356\n",
      "15 0.21003828435649802 452751 715973\n",
      "16 0.22632817924411375 680794 1095537\n",
      "17 3.778473077323104 15326 107065\n",
      "18 1.173144205556108 888343 2624056\n",
      "19 23.173734934899628 851151 6907\n",
      "20 1.2436492009934832 11126 33938\n",
      "21 15.671959454611798 14512 276\n",
      "22 0.0008950095462313838 3888870 4006970\n",
      "23 3.646335481608236 603251724 89371025\n",
      "24 3.2699082799730177 49 304\n",
      "25 0.24304786756854535 123943 75703\n",
      "26 0.005101238105813938 3301663 3546103\n",
      "27 1.556482080377553 51678 14841\n",
      "28 1.1431852259652973 66 22\n",
      "29 5.120362769288442 117 1133\n",
      "30 0.009883045591363118 13821 12513\n",
      "31 3.2422454868031565 158456 959206\n",
      "32 0.08579751210272105 30352 22645\n",
      "33 0.16835271635898744 22130590 33357042\n",
      "34 0.004149269384613708 3334423 3556278\n",
      "35 3.924366819651561 3 28\n",
      "36 22.23931620497164 368 41218\n",
      "37 3.302961327693203 2992475 18421274\n",
      "38 1.399988442570925 2572984 8400386\n",
      "39 0.002015287044907706 4159874 4350874\n",
      "40 10.126471254865015 1624387 39147744\n",
      "41 0.07517586105838288 2703835 3556772\n",
      "42 0.11608191326121034 759769 1068192\n",
      "43 0.20074898041570738 2385905 1524289\n",
      "44 7.534990502492965 964 61\n",
      "45 0.19785042620785873 128430 200375\n",
      "46 1.0233364045087316 21 7\n",
      "47 3.037941998376539 39 6\n",
      "48 2.2203716496177694 15 70\n",
      "49 7.564781928212784 891 56\n",
      "50 1.6516183491218404 1570387 5677299\n",
      "51 0.40023834836756916 514381 968365\n",
      "52 1.3627944387375976 10425794 33504202\n",
      "53 1.7877093026553723 6940603 26428454\n",
      "54 0.05627664727211724 166197 210693\n",
      "55 0.15577982007722374 12522731 18582774\n",
      "56 1.2583154434236141 199794 613412\n",
      "57 0.025631982533610202 3975386 4665625\n",
      "58 0.13655795730828182 684879 991065\n",
      "59 2.662351060152258 122293 23920\n",
      "60 0.097189334810069 296506 404973\n",
      "61 0.7391019005005068 70216 165885\n",
      "62 0.24315877737375013 162281 265720\n",
      "63 0.8098046490107241 1187959 2921592\n",
      "64 0.20874273691919154 7978 12599\n",
      "65 0.02899296886267256 197 166\n",
      "66 0.24899218386478728 16 27\n",
      "67 0.006850199668123952 151006 139011\n",
      "68 1.99000697634151 295222 72027\n",
      "69 2.8889799681156876 85304 466814\n",
      "70 0.6506542317819869 2470485 1102723\n",
      "71 0.0026205406242199854 3985386 4194715\n",
      "72 0.003930458011739886 241851 227154\n",
      "73 0.19521618098647084 8 13\n",
      "74 9.2378569037773 54234 1133097\n",
      "75 2.523023639620639e-05 23781044 23900796\n",
      "76 4.683718963764685 105 922\n",
      "77 1.3002492201011302 5880870 1880278\n",
      "78 2.9837649762146943 3117952 17540885\n",
      "79 0.0006480014352024919 38121985 37163804\n",
      "80 0.0024037561986201667 10475845 11002255\n",
      "81 0.8180411519888616 16 41\n",
      "82 0.03324115007177121 669 803\n",
      "83 0.2041696050465891 16495358 25917809\n",
      "84 3.708880727756425 366648 53439\n",
      "85 0.1356783889826072 43154 29857\n",
      "86 0.011317767655718156 33950 37761\n",
      "87 30.396938247730404 575309 2319\n",
      "88 7.473171850363382 3086003 47493236\n",
      "89 0.053043636134154616 277 349\n",
      "90 1.105087071784216 1447535 4141648\n",
      "91 0.051626633436557746 3742580 4697311\n",
      "92 2.667836386886923 3305415 645461\n",
      "93 0.07132960219442133 219536 286745\n",
      "94 0.0021440744128193084 147783 141096\n",
      "95 0.03819616730374593 39627 32592\n",
      "96 2.4515082313718706 1506323 7209513\n",
      "97 0.12053333506396222 2899345 4102777\n",
      "98 0.07672167814228849 247 187\n",
      "99 0.17586562202881614 12370198 18814984\n",
      "100 27.0358546524194 4 905\n",
      "101 0.7669326564689571 26058 62558\n",
      "102 1.3352696747862447 749126 235890\n",
      "103 1.0249640732777094 310 112\n",
      "104 2.4930985626834965 2164 10499\n",
      "105 0.01818337947877196 944033 1080314\n",
      "106 0.08629559475891746 54 40\n",
      "107 0.09857190876476192 60536988 44225118\n",
      "108 1.0045397886498055 208137 567061\n",
      "109 0.1497942277520779 22498975 33132092\n",
      "110 0.30006765123509493 664161 1148612\n",
      "111 4.207711339912686 35 279\n",
      "112 0.0 6 6\n",
      "113 0.4853471526681074 883581 440237\n",
      "114 3.2908269466907414 139622 856632\n",
      "115 0.08863513970905784 716485 964948\n",
      "116 0.6334783962660067 3635419 8057780\n",
      "117 8.192193882793426 34 1\n",
      "118 0.00808583441979001 15267 13954\n",
      "119 0.3005763278396671 13788 23857\n",
      "120 1.1419694381753258 126324 367780\n",
      "121 11.245336615159252 285 9\n",
      "122 0.002373589071466276 4008688 4208825\n",
      "123 1.287967473404333 809271 2517499\n",
      "124 2.427814650747429 27 132\n",
      "125 2.9274517647628913 966253 5347617\n",
      "126 0.4260028623559509 336525 646364\n",
      "127 0.20659743493840044 27527221 17472801\n",
      "128 0.004390888751980507 1567220 1466736\n",
      "129 16.511589005185883 546373 9391\n",
      "130 2.1927231032692225 46570 10592\n",
      "131 3.110814178552116 29198 170354\n",
      "132 8.5221291629142 90907 1684318\n",
      "133 5.946923612927419 51981 4536\n",
      "134 1.0177000534042968 511943 1403923\n",
      "135 1.343830214629752 305 95\n",
      "136 4.6074837148975325 104112 890676\n",
      "137 0.980889592940098 34 12\n",
      "138 0.6550069200513644 213846 480383\n",
      "139 0.6293490901845118 2461224 5441062\n",
      "140 0.0025611635169034866 210400 221322\n",
      "141 0.627173556616672 205530 453746\n",
      "142 0.39073805457038435 1202263 2246327\n",
      "143 11.10358689763197 0 27\n",
      "144 1.69725006984558 31746 8627\n",
      "145 0.0017929574307570906 1205 1155\n",
      "146 7.721895850324711 338767 5454222\n",
      "147 0.014568425223862449 1528997 1355150\n",
      "148 3.0653251982242193 522538 3009431\n",
      "149 3.0926995016466483 1334 229\n",
      "150 33.973850206574085 15336 5213346\n",
      "151 8.209651740387985 3357688 191283\n",
      "152 3.2818270080865735 347849 2128882\n",
      "153 0.0024337149956366886 314019795 298904262\n",
      "154 1.040253984387511 31091 86217\n",
      "155 0.15033012899968032 575965 848756\n",
      "156 1.193081713037108 9857055 3306547\n",
      "157 0.1298340416101831 76599492 109827984\n",
      "158 0.6357423609638571 698650 314761\n",
      "159 0.12385872615514308 171021 243161\n",
      "160 0.6654472510774385 51 22\n",
      "161 0.022673825931510568 2404613 2795378\n",
      "162 0.9765845984605952 977853 2626961\n",
      "163 10.708551009018654 4671 123220\n",
      "164 0.2648335102015008 894522 1496538\n",
      "165 0.1985667348159554 1639322 2559703\n",
      "166 3.8291365866874836 1024816 7252396\n",
      "167 0.0037039775275748365 11922543 12670688\n",
      "168 0.2124212768229567 20570461 32614206\n",
      "169 5.773644263062545 2071663 22901284\n",
      "170 0.36261521520046497 68 125\n",
      "171 7.30725743536613 435489 29173\n",
      "172 6.333541116124654 535372 6631699\n",
      "173 0.29890065001538224 255037 440596\n",
      "174 5.536364467942249 235520 2476862\n",
      "175 0.8641210282304861 726880 1841514\n",
      "176 0.5942077001363492 624 1350\n",
      "177 0.0035617600836580733 396 373\n",
      "178 0.10084817463105894 14302437 10411010\n",
      "179 0.07432601387488968 102299 134361\n",
      "180 0.10963246591605535 1132589 1577142\n",
      "181 5.137052572951191 88752121 9201261\n",
      "182 0.06855211026549891 1965207 2553391\n",
      "183 16.62631160645319 1710 28\n",
      "184 0.9670268969471258 60458 161634\n",
      "185 1.2558765177639308 38745815 12633648\n",
      "186 16.01890485346821 1874950 102610978\n",
      "187 0.5416414250814501 237886 496586\n",
      "188 1.6790439215075657 360066 1315627\n",
      "189 0.5527751977607808 25643 53935\n",
      "190 0.17766303036992206 9774 6412\n",
      "191 4.9959350100331426 15258120 142632403\n",
      "192 9.00891550911769 921824 18542875\n",
      "193 0.44825360499384437 2411750 4710859\n",
      "194 0.4804530139182014 3 1\n",
      "195 0.18270838581137297 22 14\n",
      "196 0.012659138380204181 157807 141014\n",
      "197 0.056422602472542896 28054877 35576913\n",
      "198 4.765964589151599 286864 32326\n",
      "199 5.034389798487102 197 20\n",
      "200 0.16549016880719497 124817 187477\n",
      "201 0.0 1 1\n",
      "202 0.3631876837095733 9088880 4974879\n",
      "203 0.048201875055624364 3217975 4008037\n",
      "204 11.74347215118749 24856 765105\n",
      "205 0.032522881153061324 66485452 55514383\n",
      "206 0.01671089753222116 24260107 27607955\n",
      "207 2.861767725079994 37 6\n",
      "208 1.9560246959014573 808 3275\n",
      "209 0.1237659565765908 12073578 17164157\n",
      "210 0.10070139549853532 9869352 13555184\n",
      "211 34.578520842788954 152 54763\n",
      "212 0.09291247791727476 4669455 6333531\n",
      "213 0.3650572708863974 85 46\n",
      "214 1.4673478211388467 47621 159913\n",
      "215 0.07575472650064208 17991 13662\n",
      "216 19.485125599020716 12 1073\n",
      "217 2.638921591641918 334 65\n",
      "218 0.17705470394518236 1942333 1275210\n",
      "219 5.973528838413836 1140767 13141590\n",
      "220 32.649858713003326 469474 1548\n",
      "221 0.680966432054144 84 193\n",
      "222 37.322906027512246 3 1799\n",
      "223 0.19500946747843229 3052 4747\n",
      "224 0.9288704279392181 1231286 3227923\n",
      "225 0.003545765104405617 100757 106939\n",
      "226 3.3083354513824 9387532 57873844\n",
      "227 0.09400830315435132 56077 41269\n",
      "228 0.3952812591733394 1186 632\n",
      "229 7.058357261376046 56 3\n",
      "230 0.0055615815116840505 14718450 15858059\n",
      "231 0.07345904347257 1373405 1800972\n",
      "232 0.17719225746868897 2661733 1747236\n",
      "233 0.840489256917792 406 1017\n",
      "234 3.4838495208067783 96938 626775\n",
      "235 0.05628516954718109 2440156 3093512\n",
      "236 18.356888796533323 781678 10771\n",
      "237 0.6145561683524957 331182 725315\n",
      "238 11.486160444937562 799064 23684263\n",
      "239 0.2109924071714948 20974 33203\n",
      "240 0.9133042136863353 4106536 10678683\n",
      "241 3.450444035064641 8140 52166\n",
      "242 6.861564159839764 9145061 125546073\n",
      "243 5.965059041176741 22 1\n",
      "244 2.343074738644626 6495 30020\n",
      "245 0.2693025337832163 846340 1422065\n",
      "246 0.9397060254290202 841568 319220\n",
      "247 10.20833739253143 128405 3134565\n",
      "248 5.719800252916276 2024687 185222\n",
      "249 0.09001365975112217 1806955 2439190\n",
      "250 9.639147867181268 137806 6178\n",
      "251 0.23667503174901405 20923 34034\n",
      "252 0.041623419149790064 106592 86920\n",
      "253 13.768228670492974 3310 80\n",
      "254 0.22851368934404723 918640 1481671\n",
      "255 0.0005716249768193461 4707359 4596147\n",
      "256 0.06969758934249816 6868617 5274900\n",
      "257 11.10358689763197 55 1\n",
      "258 1.3007911136918306 36531640 11677412\n",
      "259 1.6160474806661127 21549450 76829516\n",
      "260 1.9311020215946264 112462 451359\n",
      "261 0.9657578968865301 180103 481190\n",
      "262 0.038868017270723136 888877 729828\n",
      "263 0.7306773649991077 229876 540416\n",
      "264 0.29540883920732225 1287 2217\n",
      "265 0.014370526415892858 1311417 1478437\n",
      "266 0.05416902393094556 330199 416730\n",
      "267 0.06378847122152474 72536695 93378056\n",
      "268 0.07957725705450028 9029977 11972882\n",
      "269 0.9258013036851472 108183 283161\n",
      "270 1.4976263070418956 4 16\n",
      "271 6.436623742731023 222924 2818289\n",
      "272 0.07594770765638433 1260074 1659899\n",
      "273 0.23001708957300107 703462 1136394\n",
      "274 0.01953421054937451 16384215 18841902\n",
      "275 6.143445019259774 414269 34740\n",
      "276 1.3862776115388804 43994747 142804015\n",
      "277 0.027023895064529475 2237642 2637449\n",
      "278 0.16063987064879526 284269 424419\n",
      "279 5.308474268251841 69 700\n",
      "280 0.17566236110447073 12443048 18921201\n",
      "281 1.1807780086538517 55 165\n",
      "282 0.7535361748007622 352140 838903\n",
      "283 5.965059041176741 13 160\n",
      "284 1.3388435152492484 154 492\n",
      "285 0.07889575306296571 963780 1276334\n",
      "286 33.70583361261199 52850 17555966\n",
      "287 0.18989263050821975 14389 9306\n",
      "288 0.6500714333430331 169224 378985\n",
      "289 0.2511269513846958 220645 133677\n",
      "290 3.08718730228896 5983912 1032553\n",
      "291 0.41598229687455457 518214 271896\n",
      "292 0.0 7 7\n",
      "293 2.2240083096214214 205967453 46358618\n",
      "294 0.013513159603284032 1317858 1173231\n",
      "295 4.645266849870802 12841 1487\n",
      "296 10.187888244796481 145 5\n",
      "297 0.12503869810457943 599124 853271\n",
      "298 0.15916161448682617 1248826 1861076\n",
      "299 0.14251135449091196 2039161 2974409\n",
      "300 10.021803492290772 805 33\n",
      "301 0.10583159861090156 19101594 26445592\n",
      "302 0.12914967252854945 73 105\n",
      "303 0.7154762105915319 121011 51936\n",
      "304 0.1500943106559364 304717 448902\n",
      "305 0.3217927830328236 96094 169457\n",
      "306 15.21432510225974 127182 2572\n",
      "307 9.619161632541404 577 25\n",
      "308 8.283463231395661 1940767 34507765\n",
      "309 0.13868990386793287 4476346 3084520\n",
      "310 4.1645271354551685 183586 1412908\n",
      "311 0.04309586810442819 1985122 2443120\n",
      "312 0.4439822602118319 330 169\n",
      "313 0.08276097481015171 7 5\n",
      "314 0.3613960012688387 365413 666601\n",
      "315 0.5162779552068611 257893 529044\n",
      "316 0.16971784567733147 21787741 32894838\n",
      "317 1.3352274432093298 234 73\n",
      "318 0.2079848283939336 178441 281551\n",
      "319 19.05514142598841 235 2\n",
      "320 15.707582509334623 188 9946\n",
      "321 0.31316983010658395 3 6\n",
      "322 0.2225006334837436 129083 206884\n",
      "323 0.07289931668367862 11749164 15390968\n",
      "324 0.12939858719566197 247749 355007\n",
      "325 9.962317080005114 727 30\n",
      "326 0.391945511839516 1545432 826337\n",
      "327 1.1686786100488094 409756 139004\n",
      "328 0.0003246665405500505 55 54\n",
      "329 4.1488879856026495 45 5\n",
      "330 0.4804530139182014 4 9\n",
      "331 0.03132754013379589 717872 856871\n",
      "332 0.11010172897681003 752784 1049002\n",
      "333 8.85774765878833 9902017 504867\n",
      "334 0.5869384550860869 962086 447190\n",
      "335 0.5726935063209128 712222 334161\n",
      "336 2.976310563831287 43 246\n",
      "337 0.7511692019490221 35546 84567\n",
      "338 0.19793143027248275 133200 207836\n",
      "339 2.338918594308346e-06 4589762 4582748\n",
      "340 0.5808574589340464 6 14\n",
      "341 1.23412798231758 1284983 3902663\n",
      "342 3.409683099213326 75785 480316\n",
      "343 0.5956148901080959 96938504 209733506\n",
      "344 5.050372192253296 118 1125\n",
      "345 7.537428346734661 6 108\n",
      "346 0.08729910110097398 63 85\n",
      "347 0.24346733177085048 707355 1158588\n",
      "348 0.82527614325034 7013 17397\n",
      "349 1.2052715650078982 222690 667562\n",
      "350 1.5976270009285845 1393 4933\n",
      "351 1.1525864117661488 129025 377510\n",
      "352 0.0833085195467069 3247934 2433637\n",
      "353 0.48554682363580004 198150 397756\n",
      "354 0.04427107782577861 481231 593926\n",
      "355 0.1313031008371111 540732 776877\n",
      "356 0.2603304813239175 540087 899606\n",
      "357 4.065548178151646 4588 610\n",
      "358 3.6544673177206795 22470762 152000095\n",
      "359 1.2246766270863776 6036789 1996151\n",
      "360 0.027213333736247732 3016284 3557259\n",
      "361 0.43418499731897436 66599 128718\n",
      "362 9.539188547889237 1675333 76340\n",
      "363 0.02725134563816439 781703 662748\n",
      "364 1.4836736578923282 25063730 84731145\n",
      "365 0.00026256285526946016 2099439 2133735\n",
      "366 9.680650408860574e-06 1236433 1240286\n",
      "367 0.32101865918848504 303152 534227\n",
      "368 0.013279959206562972 106514300 94920580\n",
      "369 2.555676869739802 2024979 10016244\n",
      "370 0.0003432643391712657 36088826 35426350\n",
      "371 1.963973231915199 166842 677541\n",
      "372 5.315841225221419 32 330\n",
      "373 0.1664066247519847 6483 9749\n",
      "374 0.1417869254621764 154335 224904\n",
      "375 0.365602085273793 63144 115593\n",
      "376 0.38808201857987995 2392002 4459752\n",
      "377 0.34900791328218633 6984 3868\n",
      "378 0.00029992395557769895 1676197 1647418\n",
      "379 0.03986831421737602 24907937 30412601\n",
      "380 0.0 1 1\n",
      "381 0.4679891774908144 439 221\n",
      "382 0.7951138611109447 106 260\n",
      "383 12.754344747639593 548998 19524378\n",
      "384 0.8181727437594837 4654481 11500144\n",
      "385 0.04335186454480201 3080387 3793414\n",
      "386 0.0 19 19\n",
      "387 0.0061959609675418794 55028592 50863139\n",
      "388 0.06099537832474544 1176784 919259\n",
      "389 0.07454617625800775 1380259 1813580\n",
      "390 0.06484112262308604 10275 13255\n",
      "391 1.5182505854435537 2635496 768663\n",
      "392 1.1237655760493237 6312119 18220542\n",
      "393 0.8144252845489465 5056578 12467749\n",
      "394 6.779029414699822 420 5688\n",
      "395 0.023781351598618074 24924550 29080426\n",
      "396 0.019533400536477192 22 19\n",
      "397 13.927581688522334 200 8393\n",
      "398 0.048381369483370025 18359 22876\n",
      "399 0.03423452623056454 991623 1193170\n",
      "400 2.3556511205480466 512745813 110494657\n",
      "401 4.740031829630808 8358424 947534\n",
      "402 0.025586766587368588 1697 1446\n",
      "403 3.0632447450019185 459055 79754\n",
      "404 1.927008945105714 39008 9733\n",
      "405 1.9218120556728056 127 31\n",
      "406 0.06638504453013068 677920 877154\n",
      "407 1.5706464300609733 43642036 12463027\n",
      "408 0.3127983039302073 69255 39587\n",
      "409 0.5053901690379794 6223816 12670688\n",
      "410 3.5575324464839224 140752 21345\n",
      "411 9.446161425105089 965393 20868520\n",
      "412 0.18604822520386174 978 635\n",
      "413 0.2548828528810962 456710 756656\n",
      "414 2.7414042976231126 198 37\n",
      "415 0.03710376123443057 1185645 977911\n",
      "416 0.12265561830486461 248929 353326\n",
      "417 6.53079575872256 37881 487854\n",
      "418 0.07894782650231712 5341135 7073920\n",
      "419 0.020890424230078808 19826951 22910089\n",
      "420 0.1231571220913202 502741 714093\n",
      "421 0.00046855288271947706 4737259 4840920\n",
      "422 0.29064579979927413 200535 343816\n",
      "423 6.412233796300422 23577 1873\n",
      "424 0.9620260235635408 7 2\n",
      "425 1.7754359141503249 194673 737879\n",
      "426 1.2409575765167653 1052087 3205151\n",
      "427 0.5558357105806591 51637 24500\n",
      "428 1.5694150552333266 1 6\n",
      "429 0.5529975345298489 75856 36060\n",
      "430 1.389744205886427 1140 350\n",
      "431 23.37425973309325 24 3144\n",
      "432 0.03268290618302272 1909323 2287668\n",
      "433 0.22090341150416287 7 4\n",
      "434 15.39775134461287 30917 610\n",
      "435 8.087195646571274 181000 10534\n",
      "436 13.755474962957955 114198 4660042\n",
      "437 0.28709933926515085 7217053 12332811\n",
      "438 2.092209843642909 68234 289861\n",
      "439 10.729892925362291 1138770 30132403\n",
      "440 2.0066248317563478 2875417 11855025\n",
      "441 0.010430433764556275 92 102\n",
      "442 4.248246501389507 107288 13658\n",
      "443 12.640499838336531 69 1\n",
      "444 5.08982963327956 104 10\n",
      "445 0.0005474477021381295 4106790 4204012\n",
      "446 0.37947649569937303 2259129 1220136\n",
      "447 3.594379711277746 34432 229269\n",
      "448 0.21018078784479097 353776 223678\n",
      "449 1.8117730554826086 1084950 4168498\n",
      "450 3.4252012984602033e-06 448345 447516\n",
      "451 0.004165220710624722 15 14\n",
      "452 13.831144831090361 1654 68225\n",
      "453 0.011057745211123507 5361031 5955482\n",
      "454 0.01177343525624416 13048014 14543460\n",
      "455 0.13698535769059542 2000 1381\n",
      "456 0.08967336465215858 4662219 3455739\n",
      "457 0.014457413590495224 6596940 7439807\n",
      "458 18.631564187489722 1717480 22923\n",
      "459 21.57913796477143 5740 597627\n",
      "460 0.025671610468018583 48545 56981\n",
      "461 0.34762697323335207 239743 432321\n",
      "462 0.12650577339876 1623536 2317023\n",
      "463 0.04905152337113266 121767 97576\n",
      "464 0.22284311333275086 1153141 1848829\n",
      "465 0.4198532177283563 222619 425568\n",
      "466 0.0002878236678063009 11353 11162\n",
      "467 0.05174853379314513 2308251 2897862\n",
      "468 0.418126690419011 62 32\n",
      "469 0.41197641119460554 18 9\n",
      "470 5.301898110478397 19 1\n",
      "471 2.8698483897206866 623322 3391838\n",
      "472 2.742047155220336 133923 701474\n",
      "473 1.518989613503829 39978 137115\n",
      "474 0.11321356601688148 4 6\n",
      "475 0.003164374625356591 3775162 3993612\n",
      "476 11.79226812056771 185 5\n",
      "477 0.005364162671983677 3071961 3305397\n",
      "478 5.445987827366744 908231 9369020\n",
      "479 2.380921207881254 1410594 301491\n",
      "480 0.0030170814973238627 2483671 2350927\n",
      "481 10.560814742556937 207 5362\n",
      "482 0.005434202014633683 3041678 3274373\n",
      "483 9.950970945113456 195573 8342\n",
      "484 1.9876992457033469 3022521 12378355\n",
      "485 5.916572761119778 1905602 167360\n",
      "486 0.3884423133578967 924735 1724615\n",
      "487 0.8702099581718085 1369 538\n",
      "488 0.1388361145773365 503985 347213\n",
      "489 3.9697779611674426 21 2\n",
      "490 10.210203930499686 635466 26023\n",
      "491 0.14374212588280885 10362 7092\n",
      "492 0.009690447935478456 28 31\n",
      "493 0.0026273659515571013 237499 225632\n",
      "494 1.794995296819754 10 41\n",
      "495 10.42657192951538 50 1287\n",
      "496 0.2824543073440391 558764 950694\n",
      "497 1.8867354268388144e-06 346047 345572\n",
      "498 0.40667529969617083 33238 62892\n",
      "499 5.185946287375705 38 3\n",
      "500 4.78310171725257 974 8685\n",
      "501 0.10534228925198226 14545502 20122661\n",
      "502 6.897080406991599 2829 39114\n",
      "503 3.4150027823475626 310 48\n",
      "504 5.358380805666773 64 657\n",
      "505 0.15192705303703838 28459670 19273089\n",
      "506 5.181252662086764 10701 104236\n",
      "507 2.098532422600076 24257 5697\n",
      "508 9.091273432413972 135141 6626\n",
      "509 12.841607982273604 107 2\n",
      "510 0.11951304210216966 418987 592024\n",
      "511 2.0007119097553447 34 143\n",
      "512 2.1275577844447118 42 9\n",
      "513 3.344598487103173 760881 4737678\n",
      "514 0.03378208960174891 2205265 2650231\n",
      "515 0.0889207295393269 2525573 3403022\n",
      "516 0.02790700669487356 1505927 1274246\n",
      "517 0.4269496743214693 253514 487278\n",
      "518 2.3517860809561824 329491 1527068\n",
      "519 2.4190137911865515 9542848 45200582\n",
      "520 7.550017523891892 19144 298798\n",
      "521 15.703119045676283 19 1051\n",
      "522 1.8685922039294462 870 221\n",
      "523 2.076086407356595 2727 11523\n",
      "524 0.10141277886331593 7 10\n",
      "525 0.5415841577351903 1337390 640693\n",
      "526 2.0427250844571017 26405 6323\n",
      "527 4.579883184141647 67 7\n",
      "528 7.364079912114239 5852 88290\n",
      "529 93.99897318027026 190 3102069\n",
      "530 0.19439822377033067 53367 82939\n",
      "531 8.419945287387508 26810 488102\n",
      "532 0.11760241822735588 13610257 19177847\n",
      "533 0.04971799776251957 16891069 21110285\n",
      "534 67.28929674661323 562393 153\n",
      "535 5.813445108371814 104611 1166037\n",
      "536 0.48298163045371084 135778296 67765592\n",
      "537 0.017830632816244415 7 6\n",
      "538 0.15801397747206478 175251 260794\n",
      "539 0.0018741526627728245 28598647 29863915\n",
      "540 0.008274549002301887 2730121 2990111\n",
      "541 0.7805738562942335 74 30\n",
      "542 0.29727121398425715 91660738 158114582\n",
      "543 0.025051362317218055 54546469 63900629\n",
      "544 0.07373688229284024 1038031 1361887\n",
      "545 0.345493163436756 4 8\n",
      "546 0.22525648631184828 432 695\n",
      "547 2.8248732779158487 2786821 519008\n",
      "548 0.3396578967494607 249259 446436\n",
      "549 0.005287057445907027 1247583 1160088\n",
      "550 0.03442975897065419 619 514\n",
      "551 10.845688678572834 86 2342\n",
      "552 0.2122998513394824 18978 30086\n",
      "553 1.533521890982941 6475059 1876865\n",
      "554 0.028765351601619603 252554 299235\n",
      "555 0.4804530139182014 0 1\n",
      "556 0.3382693174756671 3625504 6485719\n",
      "557 0.14072204515701559 68517 99705\n",
      "558 0.01237911293923343 6919003 6190465\n",
      "559 0.20469573731400895 175079 275248\n",
      "560 0.020057020395727397 352286 305766\n",
      "561 1.2484669994299937 140 430\n",
      "562 0.18834025291285864 45769 70640\n",
      "563 0.2699672341243698 203290 120910\n",
      "564 0.11034116031481413 4241566 5912738\n",
      "565 0.0023951454377598366 4007330 4208328\n",
      "566 0.28514211893562535 23113 39425\n",
      "567 0.11655287656306557 6524132 4637194\n",
      "568 1.040227810261739 24177324 8718956\n",
      "569 0.4804530139182014 1 3\n",
      "570 0.021564183480327077 3871022 4483330\n",
      "571 0.49747258369802927 6105046 12359600\n",
      "572 135.64468341791064 2400486 20\n",
      "573 1.700356616193851 66520 245057\n",
      "574 0.7145175521187606 345445 804424\n",
      "575 0.04134661206985997 56623 69391\n",
      "576 0.5191073751118274 224701 461861\n",
      "577 0.7313418395228254 3908756 9192653\n",
      "578 0.24412913873394787 909223 1490229\n",
      "579 1.0098734118776111 328945 120417\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "580 0.3369088036125197 1289 2304\n",
      "581 90.10017612927676 53018 3\n",
      "582 0.044350888686429855 886675 718296\n",
      "583 0.26897979062873345 1032516 1734348\n",
      "584 0.5362003084461577 1131737 2353753\n",
      "585 3.7343715715740577 12994 89748\n",
      "586 0.003174950092624149 3509687 3713124\n",
      "587 1.3212409728590844 8154422 25739143\n",
      "588 0.05751859600577802 1407164 1107102\n",
      "589 0.37869299784481125 3239246 5993773\n",
      "590 10.273212851193492 49837 2020\n",
      "591 0.23519858849858546 2080611 3379185\n",
      "592 0.41753623706115056 243089 463868\n",
      "593 2.0681092873440803 10401 43819\n",
      "594 1.7217611096631986 2045114 7595932\n",
      "595 20.737794743853403 3 379\n",
      "596 2.653388039024522 38890 198280\n",
      "597 6.853019402277258 444002 32394\n",
      "598 0.1623801441215119 828199 1239196\n",
      "599 0.06249314061374677 321286 250221\n",
      "600 13.020968320833385 405 10\n",
      "601 2.2020566459812554 116 515\n",
      "602 4.475303053591332 181097 1501964\n",
      "603 0.7351371118881453 795111 1874100\n",
      "604 0.049774116763183815 2602871 3253451\n",
      "605 0.13328353698382978 315164 454035\n",
      "606 0.11217391385206135 1131836 1582119\n",
      "607 1.4296998614017065 24178360 79930589\n",
      "608 0.629247614601889 4094561 9051323\n",
      "609 0.7623437201686781 5045058 12079771\n",
      "610 1.0621699576532742 158056 443000\n",
      "611 0.2897273436420708 14606 8526\n",
      "612 16.807370912684988 1204304 72644813\n",
      "613 0.03861345292220486 36501 29989\n",
      "614 0.4959509736746082 268 543\n",
      "615 7.514500203860734 1193 76\n",
      "616 14.972847073880542 26185742 546477\n",
      "617 2.4059157226817875 32095 151385\n",
      "618 0.19917217797246947 15 24\n",
      "619 0.7950533713080298 19315 47114\n",
      "620 1.957697076329687 2249355 9114086\n",
      "621 2.5106510952109793 11101 54142\n",
      "622 2.9510621575148743 22022 3951\n",
      "623 0.21366293217842242 82108 130357\n",
      "624 2.7497203540362505 41 7\n",
      "625 0.10880436942030959 82785 115135\n",
      "626 0.00032388590397648795 755780 742300\n",
      "627 1.7925816864070834 35924887 9417380\n",
      "628 2.2827058735802908 961142 212142\n",
      "629 0.19391471217437942 1611643 2503305\n",
      "630 1.8251883304160503 3218361 833498\n",
      "631 0.025657946214261058 970272 1138831\n",
      "632 0.009084030374332749 49 54\n",
      "633 2.1272086870598206 6493894 27920406\n",
      "634 0.8395887053184748 13 34\n",
      "635 0.8676046532314869 287 730\n",
      "636 1.394170367148539 16328351 53178431\n",
      "637 5.953396815131017 8301251 95236636\n",
      "638 0.6469556404476422 107269 47990\n",
      "639 0.20941751753138071 52459 33195\n",
      "640 0.09732946077578492 932746 1274246\n",
      "641 1.6937811988218614 72257 19663\n",
      "642 0.5124299310434735 2770847 1354335\n",
      "643 2.915367482394311 201 1113\n",
      "644 0.12344709605698678 254989 362336\n",
      "645 0.1138962819871466 10328050 14473925\n",
      "646 0.20397100586328887 858006 1347819\n",
      "647 0.12024293706590783 768079 543012\n",
      "648 2.214119017995327 376143 84942\n",
      "649 0.0168032613585731 2170888 2471348\n",
      "650 0.16440195389316548 2 1\n",
      "651 2.2444413604351108 21431 4790\n",
      "652 1.9794060138364826 11 48\n",
      "653 0.36537174791695726 3603205 6594815\n",
      "654 9.838483149938158 19186 441804\n",
      "655 2.3258463242185594 27503 126393\n",
      "656 0.021747103478784748 33228 38508\n",
      "657 0.0011624152470995144 178 172\n",
      "658 2.939022098822393 1871128 10390579\n",
      "659 6.055025660922303 1264 107\n",
      "660 6.202325413872001 14 180\n",
      "661 0.0941183882970925 543583 399972\n",
      "662 0.002292635310380863 922994 879841\n",
      "663 5.499052243671553 312 29\n",
      "664 0.09904079598350232 734396 1006019\n",
      "665 0.0002772209967867621 3853933 3918638\n",
      "666 0.268931207300667 560565 941554\n",
      "667 1.1150325826903076 788827 2267649\n",
      "668 4.59751533244855 42490 362662\n",
      "669 0.002631002049127918 303 319\n",
      "670 9.610456070928016 10677 480\n",
      "671 0.8615514016957593 132892 52527\n",
      "672 2.0298887436922075 120898 502550\n",
      "673 4.316647104031585 558 69\n",
      "674 0.43032122596911004 2161964 4166207\n",
      "675 0.0178306328162444 55 63\n",
      "676 4.590005821799838 5587082 47602591\n",
      "677 0.10402012989885062 986934 1362566\n",
      "678 1.679235672665237 8440 2309\n",
      "679 10.171691808464072 3959957 96114712\n",
      "680 0.2722890656029318 643687 1084665\n",
      "681 0.03324115007177121 14 17\n",
      "682 0.8395887053184748 5 14\n",
      "683 1.0316215116420024 2592997 7159945\n",
      "684 0.44710106104367336 120 61\n",
      "685 3.3299592270428575 50261072 8104449\n",
      "686 0.16646463206221543 6502331 9778260\n",
      "687 0.13768385588384105 83113 57348\n",
      "688 2.460555897932695 47 9\n",
      "689 0.02023716969708374 962737 1109914\n",
      "690 73.13863382747547 147 766296\n",
      "691 3.0483015676564857 123078 705399\n",
      "692 0.09449907031347807 311207 423209\n",
      "693 0.24792341489519218 17828228 29332677\n",
      "694 0.6035635545735027 1086978 499827\n",
      "695 0.010537875345791325 40 36\n",
      "696 0.12865891969764492 40789 58388\n",
      "697 0.40559620184250267 19292137 36472625\n",
      "698 0.028569590378757684 10205612 12084978\n",
      "699 0.2523817368287483 1167871 1930075\n",
      "700 1.3303428926889156 41752210 132311278\n",
      "701 0.16907956281270295 4075138 6147820\n",
      "702 6.02596931264277 6129480 526409\n",
      "703 0.10778182319416688 331251 459978\n",
      "704 0.6937376070685242 22 9\n",
      "705 0.08242191390358433 2953287 2216272\n",
      "706 1.2790426307200873 70 219\n",
      "707 12.282257675328736 5346715 160714\n",
      "708 1.3558383152369409 756267 2423093\n",
      "709 7.733876223833177 529672 8546221\n",
      "710 1.2038445464527563 191582 573936\n",
      "711 0.6391696160549696 1650696 3671786\n",
      "712 0.1080225914075334 510219 708754\n",
      "713 0.4874880644166571 212365 426884\n",
      "714 5.084676053253633 541905 56835\n",
      "715 27.362010558481746 224 42064\n",
      "716 0.007332485622386421 3802 4142\n",
      "717 0.5415795333070056 37331454 77925803\n",
      "718 1.7767724237479163 31266 8244\n",
      "719 3.17245934479904 842 141\n",
      "720 0.0715634809031046 17122727 22374461\n",
      "721 0.015757287965518244 942084 1068085\n",
      "722 0.043428777276523924 1442271 1170959\n",
      "723 0.08347758025555475 14398066 19221295\n",
      "724 0.7122854795529522 85 199\n",
      "725 0.1587347782174347 95580190 64170926\n",
      "726 0.8184916531045682 25089896 10152905\n",
      "727 0.08251656461975893 18160224 13625959\n",
      "728 0.7840917245866925 14878522 6137571\n",
      "729 0.5915683995702838 57589806 124272958\n",
      "730 1.6888794214649008 707898 2596371\n",
      "731 6.9194880173173 187641 2604582\n",
      "732 0.2681177456947815 714122 1198535\n",
      "733 0.00147965476072287 105 101\n",
      "734 0.031083423713039873 2024146 2414405\n",
      "735 56.50131284990231 16546 8\n",
      "736 1.7290096952482787 98927 368451\n",
      "737 0.1376384449609133 276061 400062\n",
      "738 1.1157227733856423 947803 2725549\n",
      "739 1.9587481981639592 8428 34165\n",
      "740 35.42801616055955 9998 25\n",
      "741 0.2862860564857426 1623367 2771974\n",
      "742 1.5380118623258319 825149 2851872\n",
      "743 0.02915573684774464 6680713 7924627\n",
      "744 0.15391325694401564 26927623 39863872\n",
      "745 0.7575027807971907 16769131 40040288\n",
      "746 1.108784768875578 6106316 17501949\n",
      "747 21.98312616275289 44241 406\n",
      "748 8.733617285331905 1113 57\n",
      "749 0.09001502894384722 2498094 3372159\n",
      "750 0.0022282037719094827 11295 11841\n",
      "751 0.2543624353198817 338951 561269\n",
      "752 4.516692143312319 66 7\n",
      "753 0.9571427851984025 630722 1677740\n",
      "754 0.42860708047409635 815 423\n",
      "755 0.34643670654688136 8260236 14880356\n",
      "756 0.6255938725420495 321 145\n",
      "757 0.9031090155826116 537 207\n",
      "758 4.649576805223844 2835641 328236\n",
      "759 0.20368546511799707 2821333 4430555\n",
      "760 3.419210623825868 304 47\n",
      "761 0.09637788182118856 288976 394174\n",
      "762 0.11229957965628008 1974975 2761205\n",
      "763 0.010200636363342747 438382 484971\n",
      "764 0.15394884235301 1637945 1106364\n",
      "765 3.4393941677070132 17 114\n",
      "766 8.238328401827824 212569 3750033\n",
      "767 0.14247134398004593 8953 13059\n",
      "768 11.511289518891138 118 3\n",
      "769 0.5525391932306106 56933 119727\n",
      "770 0.011442534987842323 6208 6909\n",
      "771 0.06748458345610968 217595 167814\n",
      "772 4.114229628883778 8083909 1063450\n",
      "773 0.10463480408015641 15114161 20886545\n",
      "774 0.7918463254681101 55 22\n",
      "775 0.070379214618608 24682004 32180646\n",
      "776 9.554543448463395 43 1\n",
      "777 1.6009883584969944 23979 84988\n",
      "778 9.161614924612587e-05 3806879 3843492\n",
      "779 0.03847691540730863 6921656 5688802\n",
      "780 7.503268685799376 395 6127\n",
      "781 1.1281261127199238 982353 2841491\n",
      "782 2.9169965011740273 142 788\n",
      "783 3.3430260570693666 291121 1811909\n",
      "784 2.676159786858848 9735814 49984281\n",
      "785 5.849705216299891e-05 59879964 60339702\n",
      "786 0.017278497783438686 1453250 1274246\n",
      "787 15.428110907019928 16248 825421\n",
      "788 0.1635025581928779 3899284 2602411\n",
      "789 0.6082527590095181 595057 1297979\n",
      "790 0.27577900608925 855999 1447244\n",
      "791 0.33536149263797344 105767276 188735246\n",
      "792 0.08276097481015171 79 59\n",
      "793 0.04266397207336032 2749127 3379866\n",
      "794 1.201141146538163 11350 33962\n",
      "795 1.1270700343207707 118121 341501\n",
      "796 0.09208231263296217 375797 277438\n",
      "797 0.08206337775707363 64235 85543\n",
      "798 2.2749969797092056 130 591\n",
      "799 0.261322525595052 212058 353562\n",
      "800 0.017031227946281757 96863 85012\n",
      "801 0.9720874028280662 117522 315001\n",
      "802 38.10644890206276 1723836 3593\n",
      "803 15.882694893867363 1428 76882\n",
      "804 1.7704740940994386 16880 4461\n",
      "805 0.7134123382831214 242976 565439\n",
      "806 3.125345318835316 1053696 6172904\n",
      "807 0.9776672408506056 449222 1207478\n",
      "808 0.5629964391161741 4329373 9168317\n",
      "809 0.05543991010414634 285 225\n",
      "810 0.29744961097395056 595329 1027110\n",
      "811 0.06973403428842341 683758 525070\n",
      "812 0.1272170156336979 191764 273949\n",
      "813 0.8943360851947849 8299675 21368329\n",
      "814 0.4442774162723946 81886576 159473292\n",
      "815 0.0013234468504577524 147317 142054\n",
      "816 0.00022341202595078694 4068395 4008037\n",
      "817 1.094050218430959 36 12\n",
      "818 3.5298930304666363 31827 208334\n",
      "819 0.3017095536997744 764553 1324212\n",
      "820 4.9657075160839925 1361 12645\n",
      "821 4.307926391863451 103459 824468\n",
      "822 0.051229133488537554 1844069 2312463\n",
      "823 0.11744436521096575 204039 287440\n",
      "824 1.8255714505423884e-06 14729399 14709511\n",
      "825 0.08276097481015168 2 3\n",
      "826 6.865172963146687 4781884 65692339\n",
      "827 0.9719527132970687 33542356 89898894\n",
      "828 0.06614404977815633 3794046 2933652\n",
      "829 2.099640429953908 4150913 974641\n",
      "830 0.010180794417004375 41530 37544\n",
      "831 0.012875125362888293 2572 2296\n",
      "832 0.06826520082215792 10542398 8118366\n",
      "833 0.06143371786771769 1557364 1995417\n",
      "834 7.32000424767208 1808852 120891\n",
      "835 4.577580093478791 248779885 29283976\n",
      "836 13.515186630264418 78 1\n",
      "837 0.20145145135464512 2210751 3463106\n",
      "838 1.0962812584958812 13847182 39453245\n",
      "839 1.2956998193604978 14299458 44634620\n",
      "840 1.2188993165462878 32167 97028\n",
      "841 0.01428993975577918 85992 76303\n",
      "842 10.952661046721225 1628402 44570858\n",
      "843 0.5948301068107844 40474 18716\n",
      "844 21.965051225291877 374 40684\n",
      "845 2.0638832027973257 132662 558041\n",
      "846 0.12584971667104955 257699 367435\n",
      "847 44.50718572889161 64 51315\n",
      "848 1.1209812851445948 656823 1893495\n",
      "849 0.0404025711861162 7708067 6304499\n",
      "850 0.17109262693423666 3969824 6003576\n",
      "851 0.3778540605034896 757322 1400364\n",
      "852 9.776631449239337 4 113\n",
      "853 1.057388199233036 4971061 13900534\n",
      "854 0.4039170388580684 104930 198114\n",
      "855 0.67163618293833 1362436 600339\n",
      "856 0.007595448392550083 299519 326794\n",
      "857 1.7295468004386985 8773191 32681953\n",
      "858 6.721398065856785 2139 28598\n",
      "859 0.04960580121335961 3982941 3187691\n",
      "860 0.02527219428637296 86276 101142\n",
      "861 1.9738301501447801 20412007 83183915\n",
      "862 0.4804530139182014 0 1\n",
      "863 12.558898919876293 4 172\n",
      "864 0.11784699493931464 104373876 147122845\n",
      "865 0.013149548290581403 60988474 68398875\n",
      "866 6.2992133729084 1994 24543\n",
      "867 0.11321395591082381 246566 345193\n",
      "868 6.741276013551632 52285 701432\n",
      "869 0.03428102039849476 6394662 7695340\n",
      "870 1.5946395494652772 9225756 32615564\n",
      "871 0.036783643715315326 23459080 28418725\n",
      "872 0.53284977295537 35248 73141\n",
      "873 0.005454100782597171 2869636 3089586\n",
      "874 2.3795536051447885 5814729 1243355\n",
      "875 0.24259129915481653 1288813 2109092\n",
      "876 0.005684357961036083 9456036 10196535\n",
      "877 0.036498716427499314 534369 441439\n",
      "878 0.05545198894721895 74874930 59165385\n",
      "879 9.166965679516178 30294 625581\n",
      "880 0.17247631194729868 163340 107827\n",
      "881 0.0021611213838277636 10395110 9922924\n",
      "882 1.4099537836912228 5130 1564\n",
      "883 0.2152916846469875 5341 8495\n",
      "884 0.004534358235618928 67090 62721\n",
      "885 0.0017257761746185543 901590 864903\n",
      "886 0.07890983032613973 953091 719676\n",
      "887 10.280749636437985 33130885 1341924\n",
      "888 0.4453277525913697 2458713 4792092\n",
      "889 0.01747987100124151 2136750 2438779\n",
      "890 0.10406653857469095 195106 269384\n",
      "891 1.3690667362785358 28 8\n",
      "892 0.6139651868015694 112173 245576\n",
      "893 0.7546542908116893 965325 2301172\n",
      "894 3.6717855689125165 235630 34676\n",
      "895 0.016695671552061232 32 28\n",
      "896 0.37407267476497147 11700840 6347403\n",
      "897 6.668870976761825 73116 5526\n",
      "898 0.08720474184730896 2539193 3411496\n",
      "899 0.22379226429487686 1467923 2355883\n",
      "900 1.8244523399822274 92 358\n",
      "901 0.21538985788452525 2588861 4117801\n",
      "902 0.0 12 12\n",
      "903 0.035945415346365805 98683 119284\n",
      "904 1.2739405379972093 404 130\n",
      "905 0.4957872442189475 78977 39057\n",
      "906 5.738940143439077 2183 198\n",
      "907 1.7141264788735582 272626 1009641\n",
      "908 1.2566724095521902 1012924 3107616\n",
      "909 0.10721579014799593 1015736 1409242\n",
      "910 6.635258085063915 203665 15495\n",
      "911 0.011338731598618966 98 88\n",
      "912 0.022156862205851387 27542867 31963525\n",
      "913 3.220934912972036 1022 169\n",
      "914 0.0709506285554841 66789795 87174852\n",
      "915 0.11542109199206227 408531 573814\n",
      "916 3.924366819651561 463 63\n",
      "917 3.227274579432284 118317 713253\n",
      "918 0.13966395494900605 63 92\n",
      "919 1.1203606332959388 1378685 3973318\n",
      "920 24.864922403800847 680702 4648\n",
      "921 5.472346890522763 6628 638\n",
      "922 0.13643775271477382 356900 516374\n",
      "923 0.530381246074129 1357867 2812822\n",
      "924 20.027639905987805 14214335 161867\n",
      "925 11.432438297442216 78767 2316228\n",
      "926 0.006832894627434959 1686045 1552279\n",
      "927 1.7265631207808587 133707 497524\n",
      "928 0.9976854693276064 12935 35122\n",
      "929 1.787882625507973e-05 710 707\n",
      "930 4.17848999720594 779441 6019224\n",
      "931 0.10425637105704913 982235 1356575\n",
      "932 0.02563163163578489 963744 1131076\n",
      "933 5.235842561515445 68 6\n",
      "934 0.184720729480988 1636392 2515032\n",
      "935 0.20747275971675877 2366 1500\n",
      "936 0.20473097690575554 521669675 331809997\n",
      "937 0.2847765397708054 75532 128793\n",
      "938 26.193860663936196 2 500\n",
      "939 0.13981117760845685 781914 1136443\n",
      "940 3.2881883572464456 9667620 59270902\n",
      "941 0.0017824752756636 1816164 1894483\n",
      "942 0.018073955091301483 786 687\n",
      "943 0.25203849696873576 456 754\n",
      "944 0.585336182183673 138557 297781\n",
      "945 0.027907067202648312 131 155\n",
      "946 0.3087354090456807 250 143\n",
      "947 3.69107682242042 81 559\n",
      "948 0.05911062848640923 27102155 34561489\n",
      "949 0.032798619455321434 549569 458532\n",
      "950 2.3871997453975586 365289 1712565\n",
      "951 1.3445526332015907 11514375 3611227\n",
      "952 0.4804530139182014 8 17\n",
      "953 2.3349364159583708 181489 836519\n",
      "954 0.2118414575476867 393556 623586\n",
      "955 0.6511530159140226 81109 36192\n",
      "956 0.8489887082404359 97 38\n",
      "957 0.5029441306888537 25454 12524\n",
      "958 0.04101607755389411 479 391\n",
      "959 0.1891307566732558 542674 838321\n",
      "960 0.8213476218795698 481 1192\n",
      "961 0.000705775710531784 2767667 2695108\n",
      "962 1.3547221114214383 17824 5565\n",
      "963 0.4000258526538344 679269 360878\n",
      "964 0.48202516990026795 882 440\n",
      "965 0.13380998246651596 2316300 3339335\n",
      "966 2.8189200296255366 133 24\n",
      "967 1.060923905540754 1823478 5107743\n",
      "968 0.0775492789670364 1452288 1918641\n",
      "969 0.808939166985111 4084163 10039501\n",
      "970 3.4355987761787428 135 867\n",
      "971 2.8807867142543913 9413535 1724367\n",
      "972 22.60287028231383 6750 783630\n",
      "973 0.14933920287615624 804006 1183287\n",
      "974 0.06019175393716947 5537368 7077066\n",
      "975 0.016305218493308953 20590277 23394732\n",
      "976 16.491618069966428 94 5512\n",
      "977 5.735025307624307 964 87\n",
      "978 0.35833937073138866 975479 536096\n",
      "979 66.45128517448042 6938 1\n",
      "980 1.689092642301355 246 905\n",
      "981 0.29370746809413284 785401 1350382\n",
      "982 0.00022817273885249338 3846044 3904581\n",
      "983 0.3831649442276369 10521027 5665372\n",
      "984 0.2803423535476881 1295829 2200368\n",
      "985 2.906201743600711 1936904 10653088\n",
      "986 1.9079005357773189 16403 65286\n",
      "987 23.10799228149298 9184 1123999\n",
      "988 9.389134327045298 23 513\n",
      "989 4.402952277601769 73881 602320\n",
      "990 0.2110225858196377 3290429 5209011\n",
      "991 7.213931733152479 1114 75\n",
      "992 1.399747363251784 3084855 944967\n",
      "993 0.015484383479552437 2523623 2858028\n",
      "994 1.9157745369186558 406308 1621697\n",
      "995 0.08520170578813914 3163061 4235213\n",
      "996 0.46212323380064657 150165 296348\n",
      "997 0.4848856523262291 180530 89977\n",
      "998 0.22090341150416287 23 14\n",
      "999 0.129125222154804 1444806 2069517\n",
      "1000 0.2722095490632018 10718 18060\n",
      "1001 0.2231154152811944 791517 1269404\n",
      "1002 3.1207725184538444 2501187 14633839\n",
      "1003 2.9884190332591096 310672 1750129\n",
      "1004 0.0993719176869148 1243 1704\n",
      "1005 8.938404024798016 235634 4684433\n",
      "1006 0.6918783090648113 5896838 13547589\n",
      "1007 0.047358604688319504 3089458 2485255\n",
      "1008 0.015535739147248947 112 127\n",
      "1009 0.0012206278117373934 128673 133248\n",
      "1010 0.3186921968881464 767749 1350178\n",
      "1011 1.7559794790531558 728 2742\n",
      "1012 7.6068983928446094 204 12\n",
      "1013 4.996717953853562 44927 420058\n",
      "1014 0.020390999806835695 4448769 5131636\n",
      "1015 0.08432791970868767 10589 7920\n",
      "1016 11.288063457138202 206 5957\n",
      "1017 14.612882847515452 67400 1473\n",
      "1018 5.171567640086835 389041 40027\n",
      "1019 1.3196448357003763 18032 56880\n",
      "1020 0.07100795359133441 815345 1064313\n",
      "1021 1.5905726024971023 4696466 1330601\n",
      "1022 3.391809691684421 292 1847\n",
      "1023 0.17196070247502407 9838239 6498655\n",
      "1024 2.6620541119485246 61448 314123\n",
      "1025 0.3264538871107404 24109 42690\n",
      "1026 0.03146629636212479 1472474 1758272\n",
      "1027 1.2734338245601902 10 33\n",
      "1028 7.3596476744176946e-06 1218075 1214775\n",
      "1029 6.9973438584266425 323 22\n",
      "1030 0.037082038387520784 956828 1160018\n",
      "1031 3.7269315258506137 22898 3321\n",
      "1032 1.0982122473580624 1527040 4354840\n",
      "1033 49.876048171966154 1284 1499753\n",
      "1034 6.621051497958705e-05 7114291 7056637\n",
      "1035 3.524274378113686 16783 109697\n",
      "1036 0.005543088194430251 926894 998537\n",
      "1037 0.0610455465107497 405294 316568\n",
      "1038 0.03717207583189971 157726 130068\n",
      "1039 0.5441545378290746 3219622 6732405\n",
      "1040 1.48654685173427 12 43\n",
      "1041 0.0030759505078996746 10661435 11269436\n",
      "1042 1.047284221712172 384022 138010\n",
      "1043 0.25672123615216963 1173324 1947444\n",
      "1044 0.016780269312744202 549437 625426\n",
      "1045 0.4048039419852332 355292 671278\n",
      "1046 1.7092166434629563 23060279 6238510\n",
      "1047 0.003003449322927391 4173039 4408120\n",
      "1048 0.8598423795987984 550281 1390901\n",
      "1049 8.219410809821206 1898 107\n",
      "1050 2.4504897534996193 444648 2127470\n",
      "1051 7.2028743115353056 85897 5866\n",
      "1052 3.6180130419758116 66 9\n",
      "1053 8.407548608692246e-05 268861 266407\n",
      "1054 0.9939250794388766 49539554 18280108\n",
      "1055 0.018193607573950714 11007857 12597432\n",
      "1056 0.07501326158080353 323514 246006\n",
      "1057 0.26100056082381307 1068691 1781253\n",
      "1058 0.8544220027839877 3039178 7659424\n",
      "1059 1.618685667293116 521 1862\n",
      "1060 0.21304073648190938 2972857 4716594\n",
      "1061 1.3359063334579266 369680 116375\n",
      "1062 2.69871700442797 76002 392897\n",
      "1063 0.10984279003282231 8553553 11914691\n",
      "1064 0.28957555807423796 2545391 4359715\n",
      "1065 0.1830693522905117 205 315\n",
      "1066 0.060573160315674335 491158 628215\n",
      "1067 16.347439098834855 81331 4636597\n",
      "1068 0.029689572496106865 86435 72754\n",
      "1069 8.205474172637388 281912 16071\n",
      "1070 0.7955948673258687 53469 130461\n",
      "1071 1.1664570999223385 1477319 501677\n",
      "1072 3.610397706614068 2759 18454\n",
      "1073 0.47638570002710284 1373700 2739335\n",
      "1074 3.0509724714793305 12763 73209\n",
      "1075 0.2517553982121945 154 255\n",
      "1076 6.411383164472779 1714896 21572598\n",
      "1077 0.020723414140124997 559099 484138\n",
      "1078 0.39979706356484446 19783 37231\n",
      "1079 6.0161620863441945 3648894 42402654\n",
      "1080 0.16490398954192925 246594 164294\n",
      "1081 0.03644696177908532 35445857 42901949\n",
      "1082 1.5429724238025089 8670733 30027626\n",
      "1083 3.1790462722592485 1588344 9446842\n",
      "1084 0.14301625537352602 151181 220667\n",
      "1085 0.06361177885892928 797026 1025670\n",
      "1086 6.002897033813017 55 648\n",
      "1087 0.01467622095350161 3937755 4444895\n",
      "1088 13.038728171854922 73 1\n",
      "1089 3.743163555994721 250283 36156\n",
      "1090 2.5902903939802346 9 1\n",
      "1091 0.8487303572395668 1389611 3491355\n",
      "1092 0.729884203352908 2398311 5635566\n",
      "1093 3.37462800769658 17 112\n",
      "1094 8.752371973949927 437231 8424544\n",
      "1095 0.07748109639909817 26647462 35200074\n",
      "1096 0.7433227966335179 27246 11504\n",
      "1097 0.7949808924824926 38642 94252\n",
      "1098 9.13987463450672 48421 995431\n",
      "1099 0.6289210285291114 21982 48584\n",
      "1100 0.37520369769664247 270636 146677\n",
      "1101 0.7243243954732358 338276 792297\n",
      "1102 0.741654653365319 849723 359141\n",
      "1103 1.4978192774265033 432729 1471397\n",
      "1104 0.6178786824547611 5996978 13161664\n",
      "1105 0.6962807388474939 3439675 1493231\n",
      "1106 5.861757351284119 63527 715217\n",
      "1107 0.17576044131090718 5080 3340\n",
      "1108 0.3063154043468994 6576698 11438565\n",
      "1109 0.15511523897629897 598 403\n",
      "1110 0.5215605341453737 3108 1509\n",
      "1111 0.07081431239850533 81 106\n",
      "1112 0.1097438304467077 1063823 763832\n",
      "1113 5.360528755405962 91872 930468\n",
      "1114 0.12094033334132906 930917 1318085\n",
      "1115 0.0934704708260552 305512 414768\n",
      "1116 0.14148908548921504 697248 1015658\n",
      "1117 7.831047601401172 37621695 2291438\n",
      "1118 0.0016004925190154166 4153593 4323131\n",
      "1119 3.4947838791481054 202034 31155\n",
      "1120 0.1164518017176643 343380 244102\n",
      "1121 0.16332804436474024 3268 4896\n",
      "1122 1.8542072401381038 4542754 1163974\n",
      "1123 0.6783415238166534 321390 141039\n",
      "1124 2.4724311958536815 43 211\n",
      "1125 0.014653915549794314 604367 535461\n",
      "1126 9.357479907224153 1406422 29965580\n",
      "1127 0.0961688028418331 1127916 1538000\n",
      "1128 0.18392225431450102 1081539 704352\n",
      "1129 1.6717518229655153 100 367\n",
      "1130 1.4799696869311896 1064535 3593328\n",
      "1131 35.249286297800246 3782705 9984\n",
      "1132 1.6618718749047845 722684 199105\n",
      "1133 0.7046289657492083 290917 125664\n",
      "1134 0.028991751899926584 20352732 24130698\n",
      "1135 2.2032330221341243 85551632 19390648\n",
      "1136 1.0211514214134192 4231230 11623318\n",
      "1137 6.5789652063423505 25 1\n",
      "1138 6.869844517505458 7 109\n",
      "1139 0.024112393629463085 102507 119727\n",
      "1140 8.478761829584595 3189414 58654767\n",
      "1141 0.2673270772258214 962 1614\n",
      "1142 0.550883596719717 728555 346834\n",
      "1143 1.1658147579184694 652371 1920505\n",
      "1144 0.4451180694839006 455 233\n",
      "1145 0.09414423528230835 1145253 1556524\n",
      "1146 0.3609702935229674 1093077 1993327\n",
      "1147 0.05384634056922616 842407 667951\n",
      "1148 0.1929786450186293 424681 658940\n",
      "1149 3.8368507515536483 10706 1509\n",
      "1150 6.46394474779937 780247 9917346\n",
      "1151 0.3487116740261857 30826890 17079334\n",
      "1152 0.016564300340810603 257931 226782\n",
      "1153 0.0173835412420578 714734 815464\n",
      "1154 0.08856164829461932 60822069 45166678\n",
      "1155 3.879788014244024 83012 11579\n",
      "1156 0.12364813959979076 500626 711586\n",
      "1157 1.8981192072944897e-05 52162207 52389960\n",
      "1158 0.22192373120719064 121107 193982\n",
      "1159 0.6326586969135901 168211 372643\n",
      "1160 1.6651739987913254 1593421 438439\n",
      "1161 0.010258795383959808 23791625 21499892\n",
      "1162 0.10696074280693053 9996758 13864192\n",
      "1163 0.88150006004648 7002329 17905810\n",
      "1164 1.5076585736906543 1580058 5394228\n",
      "1165 0.2753557105103334 1583 2676\n",
      "1166 10.991518259944096 93 2587\n",
      "1167 3.4519746248799144 4121 642\n",
      "1168 1.0457204761004195 3833879 10659825\n",
      "1169 0.7761592631367171 7178727 17324491\n",
      "1170 48.77332227884022 2157 1\n",
      "1171 0.4804530139182014 3 1\n",
      "1172 2.166243911398475 243 55\n",
      "1173 4.098121528427212 48 370\n",
      "1174 0.18282342851584948 232321 356274\n",
      "1175 3.1449461157250123 27593 162551\n",
      "1176 0.35403109864586324 73953596 134080846\n",
      "1177 1.489487108387086 281568 954151\n",
      "1178 4.252701680177639 17660 2245\n",
      "1179 0.2864668667739966 786720 1343588\n",
      "1180 11.30659616818021 836 28\n",
      "1181 0.013880093607785781 13546803 12041232\n",
      "1182 0.01314607937875913 2127243 1896804\n",
      "1183 0.9352894721494981 9336031 24556705\n",
      "1184 0.56375773231303 1350428 2861254\n",
      "1185 3.924366819651561 31 231\n",
      "1186 0.43601257559167 1151014 2227670\n",
      "1187 0.004319349778621739 3418512 3650730\n",
      "1188 0.10102378518144213 9882796 13580540\n",
      "1189 0.030113649855878593 48036 57139\n",
      "1190 0.8969706749419825 283931 732028\n",
      "1191 0.011446951281949287 266079 239081\n",
      "1192 1.066724539935265 18391649 51661887\n",
      "1193 0.16039590324337663 15307362 22847198\n",
      "1194 0.345424956334317 28724 51701\n",
      "1195 5.887583644984604 671 7605\n",
      "1196 0.1024303522041403 3516813 2553609\n",
      "1197 0.008298262948222288 6845197 7498043\n",
      "1198 0.01235035575434674 25203690 28166197\n",
      "1199 0.19281299376066863 1619515 2512383\n",
      "1200 0.09735626365134253 34217 46747\n",
      "1201 5.965059041176741 22 1\n",
      "1202 0.4804530139182014 1 3\n",
      "1203 3.3141621554528835 977 6038\n",
      "1204 0.00521282122848471 3094503 3326189\n",
      "1205 0.41785120115322577 14505 7599\n",
      "1206 0.068153309946532 1357398 1762321\n",
      "1207 2.4097999193742163 5262462 24852346\n",
      "1208 2.466666451476591 1336 277\n",
      "1209 6.451731821639476 24 316\n",
      "1210 0.06968250347701023 4931522 6421310\n",
      "1211 2.931345852765852 642798 3561543\n",
      "1212 0.0784917532549537 841545 1113656\n",
      "1213 0.3121093432949495 2109 3688\n",
      "1214 1.5208508970167736 968623 3324598\n",
      "1215 0.09085110710573069 2459296 3324404\n",
      "1216 1.2832024935033917 1506527 4676700\n",
      "1217 0.004791897485698186 3136416 3361221\n",
      "1218 32.636707733940995 468934 1548\n",
      "1219 0.0013385191129887447 406183 391591\n",
      "1220 2.7517917061294783 332395 63273\n",
      "1221 0.18213681074185614 995 649\n",
      "1222 1.596917569501628 45 12\n",
      "1223 0.6474498501784757 194 435\n",
      "1224 0.5046700591623826 748482 1523018\n",
      "1225 0.004766284126368879 7758988 8313579\n",
      "1226 0.4581222867756257 121337 61665\n",
      "1227 11.88371267151177 339 10680\n",
      "1228 0.02748298138088514 14007939 16533757\n",
      "1229 3.270432222076624 52842 322388\n",
      "1230 1.3268604445409977 4185158 13242600\n",
      "1231 9.524916107571626 415 18\n",
      "1232 3.144606636302577 102726 605090\n",
      "1233 0.4506788305832249 845901 1655286\n",
      "1234 1.4551147761397383 3439557 1029487\n",
      "1235 0.21299034689059537 18532171 29400656\n",
      "1236 41.54474595028423 5038 7\n",
      "1237 0.007565937336015282 2852554 3111787\n",
      "1238 0.08288433002526405 11971735 15965735\n",
      "1239 3.0292225057639377 56 9\n",
      "1240 0.1182135526617891 2715359 3829546\n",
      "1241 0.0788454542433901 1238439 1639918\n",
      "1242 0.5355586150147456 233799 486036\n",
      "1243 3.3883563524421985 1505 238\n",
      "1244 0.8686595517445211 1248 3171\n",
      "1245 0.07910941547463424 397846 527068\n",
      "1246 1.9218120556728056 87 21\n",
      "1247 11.599420279120556 344576 10384916\n",
      "1248 4.933155119738183 2036 220\n",
      "1249 0.9020017199699931 128245 49610\n",
      "1250 0.18209595841545617 2786638 4269783\n",
      "1251 0.17654388394664786 273 179\n",
      "1252 0.31700521074869115 1290117 734695\n",
      "1253 0.08578495734057699 536340 400165\n",
      "1254 0.17488832138917115 5429677 8248870\n",
      "1255 0.02104990504437021 25297 29247\n",
      "1256 0.1497777418299851 1800842 2651872\n",
      "1257 1.5605558483408366 969 3382\n",
      "1258 1.7053221752536676 1282267 4732767\n",
      "1259 0.4804530139182014 3 1\n",
      "1260 1.4976263070418956 34 118\n",
      "1261 0.052933300345936705 2023273 2546676\n",
      "1262 0.8158699084577006 917784 2264746\n",
      "1263 1.3688502296546952 8405 2608\n",
      "1264 0.061535491097341764 13624 17460\n",
      "1265 3.80821904979601 43924 309187\n",
      "1266 6.999341386322576 3391 47800\n",
      "1267 1.9218120556728056 2 11\n",
      "1268 20.046544950281856 175 1\n",
      "1269 0.05021987799686604 4722385 3774304\n",
      "1270 5.640177343107142 42 3\n",
      "1271 0.006441658228037428 521383 481172\n",
      "1272 0.0017550141274301186 6139502 6402167\n",
      "1273 0.002296418740355824 1584244 1662011\n",
      "1274 0.31291315431946526 789176 1380742\n",
      "1275 0.03477628301826216 1966577 2369736\n",
      "1276 2.9047375739299275 1192 216\n",
      "1277 4.555341312160281 188168 22264\n",
      "1278 0.010421084896953309 574945 636738\n",
      "1279 0.8509361396467583 1598035 4019820\n",
      "1280 0.09036652722740587 3275521 2425085\n",
      "1281 5.159850226963338 403080 3907557\n",
      "1282 0.1892734679746984 5121297 7912656\n",
      "1283 0.060880582448258905 630917 807477\n",
      "1284 3.861529566170392 222085 1584686\n",
      "1285 2.4466785678223117 502274 2400263\n",
      "1286 0.04799187061653317 1015400 1264091\n",
      "1287 1.494617782994214 38421908 130473932\n",
      "1288 45.573718039902566 251301 293\n",
      "1289 6.663313654055612 2154452 163032\n",
      "1290 0.004042421351130816 22644223 24130698\n",
      "1291 9.061473277944554 655035 13292008\n",
      "1292 0.4182187643162165 10454982 19960933\n",
      "1293 1.8013279107399034 11672 44674\n",
      "1294 0.08659558467548696 268795 360763\n",
      "1295 1.6320706482065797 23019 82589\n",
      "1296 0.301093797707521 2171707 3759304\n",
      "1297 1.2298932406108598 45166 14899\n",
      "1298 0.2304337527468133 1171764 1893725\n",
      "1299 0.11228939702177104 462377 646438\n",
      "1300 6.303948185060827 267583 3295126\n",
      "1301 0.34686961640110575 17372 31307\n",
      "1302 2.936511986798269 163500 907273\n",
      "1303 0.004651484405389443 21863333 23406476\n",
      "1304 0.0016278926190196946 4155283 4326365\n",
      "1305 0.10901413951929159 1122055 1561013\n",
      "1306 0.3005578149789463 707350 1223851\n",
      "1307 0.9139125492336685 352157 916046\n",
      "1308 0.01824296501194231 16300859 18658174\n",
      "1309 1.206948960812582 8 2\n",
      "1310 0.014575328073092452 3966034 4474949\n",
      "1311 0.19666375193307045 997320 1553926\n",
      "1312 0.021628429518463742 141165 163530\n",
      "1313 1.3635012135979303 1038626 3338728\n",
      "1314 0.07383977373476061 164269 125182\n",
      "1315 1.7352883562431012 558153 2083778\n",
      "1316 0.07806064769067876 140654156 185990843\n",
      "1317 0.36816081193713823 265 144\n",
      "1318 6.611785052917597 313 23\n",
      "1319 6.88042796370588 123 8\n",
      "1320 8.583485450464755 15373732 821109\n",
      "1321 0.37002161242119913 2009 1093\n",
      "1322 0.0267056436202281 18886 22239\n",
      "1323 0.1672976475083924 6039002 9090766\n",
      "1324 1.0768325916852364 803486 2268028\n",
      "1325 0.009999987285762077 31928 35286\n",
      "1326 0.19981297958769892 10278934 6573800\n",
      "1327 0.5388136452838015 756177 362939\n",
      "1328 1.1279711022265282 819623 2370616\n",
      "1329 0.38196803755747244 237499 440628\n",
      "1330 1.98092320645889 26643 108854\n",
      "1331 0.05737933242841383 15775 20045\n",
      "1332 25.340740687916846 66605 10226660\n",
      "1333 0.10081212182944467 764991 1050870\n",
      "1334 0.18078446275546348 33773187 51668834\n",
      "1335 1.9308783347085394 684699603 170616734\n",
      "1336 15.893933815144347 130 7057\n",
      "1337 0.8673165721788891 3660235 9288953\n",
      "1338 0.366802760477271 6317890 11577086\n",
      "1339 1.1420471505452154 9144 3140\n",
      "1340 0.3386628930268014 50992 91252\n",
      "1341 0.1591175424272157 15139797 22560989\n",
      "1342 1.0233364045087316 10 3\n",
      "1343 0.00044439204372626745 24350375 23842427\n",
      "1344 0.35681087028656 6884376 3788302\n",
      "1345 0.050288223192924614 3611 4519\n",
      "1346 1.5483424654662767 16 58\n",
      "1347 4.7582381740015345 17485 1973\n",
      "1348 0.27582906392415546 56347342 95271374\n",
      "1349 5.432423391507639 71 6\n",
      "1350 2.0106689250033534 39013 161079\n",
      "1351 21.499643887866434 56 5882\n",
      "1352 0.17395631668673817 2928190 4443602\n",
      "1353 0.42698878664726475 739719 1421851\n",
      "1354 0.2510023707808147 292568 482847\n",
      "1355 0.39849012045895765 5866995 3120776\n",
      "1356 0.18724992741184415 724621 470090\n",
      "1357 1.2520054816310306 942704 2886164\n",
      "1358 0.0035797397044649536 2075 2203\n",
      "1359 1.9632925772592016 1219086 300269\n",
      "1360 0.18158231905890523 23415112 15290884\n",
      "1361 7.858829425236432 7 131\n",
      "1362 0.735560943727577 3514 8286\n",
      "1363 5.84622077427496 201 17\n",
      "1364 0.7436277165066714 135830 321741\n",
      "1365 1.7238143917722482 3218 865\n",
      "1366 0.9156324620537483 766404 294365\n",
      "1367 5.128011019419492 4935 47515\n",
      "1368 0.04580303385662968 739101 596702\n",
      "1369 0.39630562186856183 945035 503555\n",
      "1370 0.12850025107496846 1999349 2861343\n",
      "1371 0.06455461266402675 3088945 2395887\n",
      "1372 3.7173541526052793 14545 100018\n",
      "1373 0.2862869961112355 423 723\n",
      "1374 1.601549309619249 25095175 88961079\n",
      "1375 9.119754022154915 140042 6834\n",
      "1376 0.4414570901023265 12833 6603\n",
      "1377 6.550973031844098 949531 73440\n",
      "1378 51.098846874334214 8903 6\n",
      "1379 0.14094040230904492 10386586 15118747\n",
      "1380 3.304798893439737 728793 4488624\n",
      "1381 0.07160856941813248 752440 983305\n",
      "1382 0.26942306168625096 894 1503\n",
      "1383 2.079807656299913 17173264 72639411\n",
      "1384 0.09778115615372997 683803 934835\n",
      "1385 32.52605712965558 473705 1579\n",
      "1386 2.374225339037379 99286 463527\n",
      "1387 2.25179775615948 286 63\n",
      "1388 0.004038649393030224 14308760 15247603\n",
      "1389 0.0830529354894466 468898 351495\n",
      "1390 0.13791054442081627 22222411 15328873\n",
      "1391 3.4669963085063342 2175541 14002907\n",
      "1392 0.17132342143600246 106565 161204\n",
      "1393 1.5681933832993102 1024 292\n",
      "1394 4.264383751335125 3353350 26443187\n",
      "1395 1.2934779982274678 130188 41748\n",
      "1396 0.6148425414957617 83 183\n",
      "1397 0.402300074348667 93342 176011\n",
      "1398 0.15175192112719735 374473 552843\n",
      "1399 21.82162327911354 68806 643\n",
      "1400 14.377268260428258 132 2\n",
      "1401 0.1698801343305133 154801 233763\n",
      "1402 7.007378144383726 1679 23710\n",
      "1403 0.786310118952075 32042 77774\n",
      "1404 5.0014259862060335 24336 227780\n",
      "1405 0.007023284466102461 2343128 2547957\n",
      "1406 7.193758378745527 178189978 12191414\n",
      "1407 0.5186509471534075 444395 913140\n",
      "1408 0.7962648917839208 206222 84488\n",
      "1409 0.1442037376446158 2746178 4014657\n",
      "1410 35.941490590681155 58613 145\n",
      "1411 5.900233461778562 3912033 344733\n",
      "1412 0.274684591368784 1182131 1996554\n",
      "1413 4.935262555658891 294416 31926\n",
      "1414 0.000965083031683856 4148907 4279819\n",
      "1415 0.249369411820849 37800 62283\n",
      "1416 2.18752396835119 1484634 6515584\n",
      "1417 0.08892419699197918 7897947 10641958\n",
      "1418 0.13463611043992924 166974 240993\n",
      "1419 3.242856925618677 121525 735772\n",
      "1420 0.9141912734429921 29888660 77758873\n",
      "1421 0.03560396648459907 227271 188190\n",
      "1422 2.187666093530978 78 17\n",
      "1423 6.374538545670673 3496997 280023\n",
      "1424 18.664894840852558 27093 2037748\n",
      "1425 0.6730372583149283 1089581 2474856\n",
      "1426 0.0013070402032437677 1173279 1131619\n",
      "1427 0.0013687984824341975 45873 47602\n",
      "1428 0.008881649132641946 58506 53244\n",
      "1429 32.532567735064056 737 221388\n",
      "1430 0.008298459240898627 11910 13046\n",
      "1431 8.281374523362715 610690 10854432\n",
      "1432 2.8441952267838126 12796 69108\n",
      "1433 0.10874355123939171 847540 1178622\n",
      "1434 4.503223811295563 65 550\n",
      "1435 13.328686886001586 1921 74010\n",
      "1436 10.241174085612807 9194 225617\n",
      "1437 0.06742313958023073 661960 858224\n",
      "1438 7.441181053401095 9 152\n",
      "1439 3.1631129106972167 303 1799\n",
      "1440 0.6079142806851902 5994 2748\n",
      "1441 0.12217308749319022 204992 290762\n",
      "1442 0.353747317043986 1077772 594597\n",
      "1443 1.8856198399906876 27029590 6846608\n",
      "1444 15.717217612172593 12657 666995\n",
      "1445 0.16996362162915785 21719529 32801632\n",
      "1446 3.7067449027330404 47 6\n",
      "1447 0.038245639464712884 19765530 24034840\n",
      "1448 17.370323700526356 903 13\n",
      "1449 2.940525639789298 7853325 43629486\n",
      "1450 0.2035566426740936 605818 951227\n",
      "1451 0.8433675762666382 242 96\n",
      "1452 0.0 6 6\n",
      "1453 11.972153564479992 3052 97144\n",
      "1454 4.229691247775839 143743 1124006\n",
      "1455 7.583111256979088 5348105 340614\n",
      "1456 0.09977248326127079 835472 609189\n",
      "1457 0.2374529749644331 120786 74197\n",
      "1458 0.844683503941421 5054225 12670688\n",
      "1459 0.26744165431820305 2961 1765\n",
      "1460 0.00280175784348914 3974184 4190211\n",
      "1461 0.31756179513155297 11074803 19456842\n",
      "1462 0.5919158988279433 936568 433920\n",
      "1463 0.3891234793145757 65395 122028\n",
      "1464 0.8287399542916225 11766937 29243147\n",
      "1465 0.08778039636051571 1826460 2456303\n",
      "1466 0.06719262950620924 2314909 1786315\n",
      "1467 0.011228465914623684 2008366 2232866\n",
      "1468 17.249051171751642 225 14381\n",
      "1469 0.07811203584934552 244 323\n",
      "1470 0.1074846381848333 399173 554044\n",
      "1471 4.529778938351493 4436465 528099\n",
      "1472 0.13465403674113288 469835 678127\n",
      "1473 0.0041483281374496426 857847 914917\n",
      "1474 3.5936420626103462 931 139\n",
      "1475 25.900312969443736 270 43973\n",
      "1476 0.15908738699729205 15078928 22469434\n",
      "1477 0.06009458410010881 14911434 19053871\n",
      "1478 0.5524442705901522 28474 59876\n",
      "1479 0.33784710077328844 671609 1201016\n",
      "1480 0.3110592810139197 2106386 3679220\n",
      "1481 0.09580661591680102 344955 470098\n",
      "1482 0.57806891131163 447918 958069\n",
      "1483 0.607779052475775 596459 1300642\n",
      "1484 0.02635904107966038 1166685 991845\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "1485 38.7404856385098 9539931 18897\n",
      "1486 0.14500988815930982 280219 410089\n",
      "1487 2.3636511682375145 384356 1788242\n",
      "1488 0.6495168179409677 3048354 6824561\n",
      "1489 0.01596136194433468 2481670 2187137\n",
      "1490 2.0935745431814903 16 3\n",
      "1491 0.25963899270524454 7830 13034\n",
      "1492 3.7865663081964716 1 13\n",
      "1493 0.5084360715937352 1883961 923420\n",
      "1494 0.0 3217 3217\n",
      "1495 1.4101209711555227 31456 103141\n",
      "1496 0.10775538540404841 259877 360853\n",
      "1497 1.4472952829929897 114574 381558\n",
      "1498 0.07462762062875856 1290106 1695377\n",
      "1499 9.460391735145196 64 2\n",
      "1500 5.0905010534062605 610 63\n",
      "1501 6.000290589334148 146317 12631\n",
      "1502 0.4804530139182014 10 21\n",
      "1503 0.22865100737345975 24625 15265\n",
      "1504 0.27804089248762914 8044 13630\n",
      "1505 0.11623093770341744 31 44\n",
      "1506 0.17297380321704803 12493503 18936854\n",
      "1507 0.3145854904337677 593 338\n",
      "1508 1.489463608677795 38338 129917\n",
      "1509 0.33944117063090945 19944 11137\n",
      "1510 0.2627086869066565 32882050 54898064\n",
      "1511 0.033679422917640506 17621594 14667080\n",
      "1512 0.3978054503624952 3882 7295\n",
      "1513 0.8450598333388505 503 200\n",
      "1514 0.23571783918511788 25 15\n",
      "1515 0.093022502669474 1160934 1574946\n",
      "1516 0.1056506023938915 68765 95177\n",
      "1517 5.981379700652842 12 149\n",
      "1518 0.10528876347003062 20160 27888\n",
      "1519 2.1374344936445278 119249 514511\n",
      "1520 0.5019041667220977 2214298 4496897\n",
      "1521 0.6835676350197449 73570 32183\n",
      "1522 0.19863566556309337 78694 122886\n",
      "1523 0.6666906273567559 4410244 9978586\n",
      "1524 2.4072885791793563 150 31\n",
      "1525 0.0691549161915801 1371532 1784078\n",
      "1526 0.05428158928930226 1936915 2445087\n",
      "1527 0.0009388664813549417 1749723 1804166\n",
      "1528 2.4464694163245957 154557 738549\n",
      "1529 0.17139544836264278 7366919 11145089\n",
      "1530 15.036446506392588 975907 20199\n",
      "1531 0.07374930831736871 2310646 3031615\n",
      "1532 27.529791947413795 257033 1352\n",
      "1533 0.23571783918511788 12 7\n",
      "1534 0.10892225504310886 4803313 3453099\n",
      "1535 0.16440195389316548 2 1\n",
      "1536 27.80452434461347 2 584\n",
      "1537 0.3107271813467484 54195431 94634840\n",
      "1538 4.402145282688627 103892 846821\n",
      "1539 0.32883619764470634 16057 9049\n",
      "1540 0.4368905197338275 495697 255951\n",
      "1541 0.008397328846951624 170610 186983\n",
      "1542 0.0835163672594987 1858257 2480924\n",
      "1543 0.09680089247053893 500223 682787\n",
      "1544 0.674506315445954 437871 995464\n",
      "1545 0.2699971838263842 58943 35056\n",
      "1546 3.845184205337212 1403775 197552\n",
      "1547 1.4503629561591262 15811 4741\n",
      "1548 0.5680163250933984 43591 20515\n",
      "1549 0.5245417649373284 7117 3449\n",
      "1550 1.930815715033712 316649 1270714\n",
      "1551 9.708821445123524 467960003 20748721\n",
      "1552 0.0809151491903435 25070620 33319824\n",
      "1553 0.16334665661915357 10953069 16408203\n",
      "1554 0.2609428178959135 4 2\n",
      "1555 3.818577663935615 366 51\n",
      "1556 5.236709272368853 18823 185585\n",
      "1557 0.393000845399364 218 116\n",
      "1558 45.8053173069979 12 11303\n",
      "1559 0.9518909775775043 106858 283484\n",
      "1560 0.018161647102768803 21437105 24529794\n",
      "1561 4.494042361451258 1915 229\n",
      "1562 0.36082548925685837 6338568 11557568\n",
      "1563 0.08822364337181036 5970487 4436225\n",
      "1564 0.0012749862822534282 843114 873763\n",
      "1565 0.18930682190056175 13487690 20839943\n",
      "1566 0.04348654238509461 642097 790981\n",
      "1567 0.6086530464255696 1467280 3201351\n",
      "1568 0.7339009618055754 17126233 7271271\n",
      "1569 0.07324919522939483 96 73\n",
      "1570 0.0025192493798028505 8174216 7774061\n",
      "1571 2.183353424470154 1363594 5975943\n",
      "1572 0.8153488597847615 926632 2285920\n",
      "1573 1.3043864724752046 14 46\n",
      "1574 0.1642632751134359 1294093 1940808\n",
      "1575 2.864760791698894 73402 398825\n",
      "1576 3.468627404292142 70945 456844\n",
      "1577 0.5778537407872102 19745 9232\n",
      "1578 0.23462884443779172 18615323 30215955\n",
      "1579 2.300290162270763 210850 960853\n",
      "1580 1.4010920175954795 1099 3592\n",
      "1581 0.005802425411424609 2996501 3233674\n",
      "1582 0.000667211265497174 2496165 2561482\n",
      "1583 0.15972708088786405 1269464 1893172\n",
      "1584 0.4367214578426224 3847618 7450666\n",
      "1585 0.17775209858161944 13908 21202\n",
      "1586 4.054340149562572 59383 444772\n",
      "1587 0.018578732707279073 12744 11120\n",
      "1588 0.0979028505984861 794077 1085803\n",
      "1589 0.0005137770956448514 4104 4012\n",
      "1590 0.021430556498815354 14390097 16658691\n",
      "1591 0.5824625207136738 11972397 5581242\n",
      "1592 8.669720902034712 113 5\n",
      "1593 0.0 11 11\n",
      "1594 2.400782912512987 371 78\n",
      "1595 0.20012656465139403 142286 222560\n",
      "1596 3.448874824255569 1959 305\n",
      "1597 0.09032155769890589 6019619 4457056\n",
      "1598 1.2342953482722976 8748124 26571197\n",
      "1599 2.4610082262258945 31498 151216\n",
      "1600 0.2722861291406171 5125 3041\n",
      "1601 0.8282095350866924 804 1999\n",
      "1602 3.012343307966826 394482 2237660\n",
      "1603 10.175387289887345 169 6\n",
      "1604 0.09294167693564996 305240 414040\n",
      "1605 0.12184078709258787 8486330 12031325\n",
      "1606 11.320212475789925 10555 364\n",
      "1607 1.4227342435734625 1186648 3911486\n",
      "1608 0.2097009171488607 1011266 1598610\n",
      "1609 0.002081030279681211 834603 873558\n",
      "1610 2.7889855546445412 354993 1885835\n",
      "1611 0.32552143552382845 90 160\n",
      "1612 0.8004022958916482 20152 49303\n",
      "1613 0.09751716733000435 706949 966070\n",
      "1614 0.12203435512373773 15596917 22118347\n",
      "1615 0.016392611168391146 541845 476728\n",
      "1616 0.0010683859630352711 64737 66888\n",
      "1617 0.29724045112428926 9404136 16221658\n",
      "1618 0.00513918051962859 3140976 3374414\n",
      "1619 0.18715668753416353 108 167\n",
      "1620 1.0731397918376693 4602607 12968815\n",
      "1621 37.83300908631529 4321 2027527\n",
      "1622 0.01295075487404583 3953793 4430342\n",
      "1623 0.0030067516651956037 4171393 4406514\n",
      "1624 0.1329804965771343 783759 1128638\n",
      "1625 1.8031075103319472 48080 184137\n",
      "1626 1.1955473728549144 2534617 7564405\n",
      "1627 1.1892999099677093 12702 37802\n",
      "1628 0.8540761496507974 828 328\n",
      "1629 2.0118094146447345 45 189\n",
      "1630 0.08656892473804946 2410072 3234527\n",
      "1631 0.000207110824525198 83354 82163\n",
      "1632 0.14464765081996217 86533360 126577621\n",
      "1633 0.10463042973103949 549 397\n",
      "1634 2.8532087450446824 62145 336510\n",
      "1635 0.08012528973799585 17928654 23794735\n",
      "1636 0.08276097481015168 2 3\n",
      "1637 4.193045980769285 30 3\n",
      "1638 6.291255489321926 3012555 245256\n",
      "1639 0.03104932892246538 1519776 1274246\n",
      "1640 0.23241984060468773 162612 263346\n",
      "1641 1.206948960812582 8 2\n",
      "1642 2.042651843339365 496160 2071677\n",
      "1643 0.06217694737975968 3943229 5059932\n",
      "1644 0.6373419996679576 576745 259579\n",
      "1645 13.928046420588528 10130 423108\n",
      "1646 1.0601161442364677 27 9\n",
      "1647 7.034004344134511 1304 91\n",
      "1648 13.154687819930238 81 3082\n",
      "1649 0.5459697658681151 4762 2274\n",
      "1650 0.64375986197321 25 57\n",
      "1651 0.0051120816639287645 132878 123709\n",
      "1652 0.10497733680771085 2798158 2023764\n",
      "1653 0.25197936570224094 45528435 75212132\n",
      "1654 0.04624460215200994 64225 79634\n",
      "1655 3.235757870890844 8489 51300\n",
      "1656 0.010844522428613996 3989839 4427734\n",
      "1657 4.364220051301727 149191640 18470226\n",
      "1658 0.11686686309319488 10093844 14207695\n",
      "1659 3.7454721489829756 346292 49996\n",
      "1660 0.16440195389316548 2 1\n",
      "1661 13.484338634252044 109585 2785\n",
      "1662 2.2102270554524206 423083 1871047\n",
      "1663 0.9662287928530626 454568 1214780\n",
      "1664 0.2724346078806707 118810 200233\n",
      "1665 4.153777443123759 335810 2577641\n",
      "1666 0.028946740798261133 2964644 2500822\n",
      "1667 0.0017313355343182564 2101224 2190499\n",
      "1668 1.6624610281555965 12506 3444\n",
      "1669 3.61554399553216 153 22\n",
      "1670 25.321734269932257 612 3\n",
      "1671 0.24579776796721045 302362 496412\n",
      "1672 0.0006887539835097416 10522484 10802293\n",
      "1673 0.4491289611512895 24136 12348\n",
      "1674 0.015731544095704753 2520042 2856797\n",
      "1675 3.314827702698491 37465615 6066347\n",
      "1676 0.0458652133167808 1989222 2464294\n",
      "1677 0.2512962572361573 17050915 28148624\n",
      "1678 0.2088254594128621 24446661 38608324\n",
      "1679 1.206948960812582 0 2\n",
      "1680 1.0043086761618516 78 28\n",
      "1681 0.5839417661821373 54873 117824\n",
      "1682 0.0012676578477109246 3978799 4123013\n",
      "1683 0.11814593246320083 16054 11384\n",
      "1684 0.21739395634836084 10259858 16354356\n",
      "1685 0.17085448854151852 16332749 24692949\n",
      "1686 0.16030801701647596 6162335 4129147\n",
      "1687 5.9183190584606343e-05 856691 863307\n",
      "1688 0.8296906332584064 1380019 3431410\n",
      "1689 0.13034392419070268 1149276 1648992\n",
      "1690 1.70392170837222 39715256 146507839\n",
      "1691 10.996847279808932 173890 4791400\n",
      "1692 0.36242996094594576 2131158 3891082\n",
      "1693 0.6391433395767281 454255 1010423\n",
      "1694 0.0419287822653438 102654 125981\n",
      "1695 8.056034286830672 22 392\n",
      "1696 1.5351710463222183 313 1083\n",
      "1697 0.4838681200124676 27008 54150\n",
      "1698 0.0102384056264108 230668372 208470189\n",
      "1699 0.1646134158356738 372150 248035\n",
      "1700 0.16584383746345668 4067566 6112184\n",
      "1701 0.3066763267911532 108092568 188061986\n",
      "1702 0.0523036794890116 29084234 23138462\n",
      "1703 4.753441477933387 237965 2105622\n",
      "1704 1.6030208212543224 116 414\n",
      "1705 0.22254003671808864 10539631 16892742\n",
      "1706 3.016999584892896 49 283\n",
      "1707 0.039011765944579085 3110013 3789147\n",
      "1708 0.6458024375750493 1855387 4144209\n",
      "1709 8.460657254110181 2 54\n",
      "1710 0.16232763989929389 294476563 196822119\n",
      "1711 49.55470108268106 312586 273\n",
      "1712 7.378268316850218e-05 81258 80563\n",
      "1713 0.015298206799672072 1072211 947467\n",
      "1714 0.5557781180940594 79374 37662\n",
      "1715 7.238271873190709 201 2976\n",
      "1716 0.904260823788533 934261 2417968\n",
      "1717 0.8306193749516054 84912088 211241006\n",
      "1718 0.14360126327607683 19882227 29042893\n",
      "1719 1.4865468517342695 43 12\n",
      "1720 0.11329695047337268 43815 61349\n",
      "1721 2.694311527621508 325 1682\n",
      "1722 0.9199772326853606 821 2144\n",
      "1723 0.6817264471338703 52186 119163\n",
      "1724 0.46166550222283487 204277 403001\n",
      "1725 6.377047691229133 291200 3638385\n",
      "1726 5.8322212419697355 671494 7513868\n",
      "1727 2.0225150724975793 64718 268326\n",
      "1728 0.5622664568684241 4226718 8946570\n",
      "1729 0.5050750705118214 5051946 10282670\n",
      "1730 0.306781560812338 211686 368332\n",
      "1731 0.22541545747733746 10079335 16204139\n",
      "1732 4.6804265351830394e-05 3839284 3865640\n",
      "1733 29.493659779114342 1369 5\n",
      "1734 0.007716182376897645 8995502 8239032\n",
      "1735 0.3603321807288302 682309 1243593\n",
      "1736 2.4623393729874463 70042 14583\n",
      "1737 0.3669350539036695 34217474 62707942\n",
      "1738 7.003681308181256 6247 442\n",
      "1739 0.020064928242788303 33361424 28955192\n",
      "1740 0.11321356601688148 6 4\n",
      "1741 0.06854317085014491 381987 496307\n",
      "1742 11.065067072937733 492267 17682\n",
      "1743 3.0704479083176084 98 570\n",
      "1744 0.9995497128811108 183968 499966\n",
      "1745 0.01773494052238147 146348 167195\n",
      "1746 5.960686939393601 702859 61172\n",
      "1747 0.3002991607939175 3074739 1777529\n",
      "1748 4.766466543985989 1643856 185225\n",
      "1749 2.6232162665559575 741062 146708\n",
      "1750 0.3863708031872592 23239 12481\n",
      "1751 0.014935244972529191 1127660 1274246\n",
      "1752 0.2014361451435448 13137471 20579284\n",
      "1753 0.34122867857523825 60109 107804\n",
      "1754 0.02633384506622572 794954 935014\n",
      "1755 0.3093583295987768 114561 65687\n",
      "1756 0.0019026310317897764 4041497 4221685\n",
      "1757 12.225565127392272 0 32\n",
      "1758 2.9968278599228535 95 16\n",
      "1759 0.9441669255313172 6313921 2389476\n",
      "1760 5.965059041176741 22 1\n",
      "1761 0.05324391618319425 2768101 2197708\n",
      "1762 0.07697432948878298 1046935 1381693\n",
      "1763 2.2430190980847566 86 388\n",
      "1764 0.006713234061508401 715186 658924\n",
      "1765 1.9131064552737318 23842602 95071090\n",
      "1766 0.8773856187747061 642866 1640286\n",
      "1767 1.3871732210443162 7788650 25291056\n",
      "1768 0.9970598245630964 37 13\n",
      "1769 0.15683502557104562 417204 619926\n",
      "1770 0.8290223298787527 23530 9466\n",
      "1771 22.678300585267145 467 3\n",
      "1772 0.2042905520850268 54 34\n",
      "1773 1.173577331207901 13637 40292\n",
      "1774 6.636693621853054e-05 2090865 2107968\n",
      "1775 0.08836123280548573 8343641 11231877\n",
      "1776 0.7184978645443917 87053 37295\n",
      "1777 0.27928584964702613 2032598 3447985\n",
      "1778 0.008275864429911947 20 22\n",
      "1779 0.18270838581137303 104 160\n",
      "1780 0.016639029170456336 24423648 27786331\n",
      "1781 0.4160756014775965 102223 53630\n",
      "1782 0.0027640258913360772 4166425 4391331\n",
      "1783 0.026475548162203027 1029 1211\n",
      "1784 0.008136031683290997 2878144 3149821\n",
      "1785 0.18894611590126523 1034525 1597791\n",
      "1786 1.2669462871519281 13446 41443\n",
      "1787 1.6837455559264045 7463911 27321443\n",
      "1788 0.001104669462000565 3914179 4046459\n",
      "1789 0.19170585148911543 3591109 5563923\n",
      "1790 16.730890423289434 9514 568618\n",
      "1791 9.691316335627969 10036 225735\n",
      "1792 0.045878901191581015 4823380 3893394\n",
      "1793 0.03217390082602505 369155 441681\n",
      "1794 0.16535119725438166 8535690 12818510\n",
      "1795 18.7656071082655 836 10\n",
      "1796 0.3296210236644516 3446764 6120001\n",
      "1797 0.0007507037865609563 71 73\n",
      "1798 0.006117065814063879 3865582 3574769\n",
      "1799 0.36609418564752877 32723582 59928559\n",
      "1800 0.8404509292929223 3509130 8776954\n",
      "1801 2.6794200236380785 24864105 127781099\n",
      "1802 0.14404123545669636 668502 977079\n",
      "1803 8.32951027065135 25 465\n",
      "1804 1.6863081136321025 1484449 405134\n",
      "1805 0.15612604971219518 2720763 1832689\n",
      "1806 17.296308501055247 2 191\n",
      "1807 0.16894882777693937 49901 75270\n",
      "1808 0.1942912596776906 20807955 32334029\n",
      "1809 0.5293061775829107 529603 255850\n",
      "1810 0.8673286821320645 2078020 5273639\n",
      "1811 9.486852284835278 543 24\n",
      "1812 0.027907067202648312 10 12\n",
      "1813 0.0905027627710353 1082874 1462951\n",
      "1814 0.3965543468526997 334234 627390\n",
      "1815 0.09965267242312448 14098158 19331237\n",
      "1816 0.20617821217967033 9496300 14953880\n",
      "1817 22.462906357708974 1107860 9685\n",
      "1818 8.71238198822493 674120 12901308\n",
      "1819 0.3989750667627661 7766252 14606001\n",
      "1820 0.8973691530938684 165915 427851\n",
      "1821 0.48778825286254796 45269 22515\n",
      "1822 2.1132450876959217 109744 469589\n",
      "1823 0.42478715590660704 2936600 1530348\n",
      "1824 0.09948395069074219 13194 18087\n",
      "1825 0.39277995178152686 79458804 148704378\n",
      "1826 0.0066366801464126735 929259 1008131\n",
      "1827 1.1485389766549785 146002 426377\n",
      "1828 11.055681845532392 5154172 185406\n",
      "1829 0.17925762526295805 16606 25360\n",
      "1830 2.305491591592533 65594 299430\n",
      "1831 0.13413898933793947 901253 624865\n",
      "1832 0.727955144170248 82421 193456\n",
      "1833 1.6023338213297234 41768775 11778970\n",
      "1834 0.0058972244529032205 3000715 3240229\n",
      "1835 2.4947103516333238 59678 289583\n",
      "1836 0.17806001899718493 12190792 18590534\n",
      "1837 0.2748726993884374 862804 1457490\n",
      "1838 0.9840700656844774 215080 579993\n",
      "1839 0.5123780591304756 33118 67755\n",
      "1840 9.470971824210632e-05 1861156 1879357\n",
      "1841 9.435094390512951 5415 250\n",
      "1842 7.817066931179246 1643634 100359\n",
      "1843 0.02089149964234272 18586186 21476462\n",
      "1844 0.6501346013289186 263346 589798\n",
      "1845 0.37410573820271137 70899 130700\n",
      "1846 0.20986371074621762 73 116\n",
      "1847 0.061606868350670904 469663 366430\n",
      "1848 0.23216794198045404 33 20\n",
      "1849 2.3153157248625362 1745477 7993626\n",
      "1850 0.10141277886331593 55 76\n",
      "1851 1.030402846949897 38494 106230\n",
      "1852 1.7055880720985344 1025513 3785490\n",
      "1853 0.05543183398461867 3817323 4830689\n",
      "1854 1.32627905744317 1217 3852\n",
      "1855 0.030124731594216376 27745835 33004718\n",
      "1856 0.17591598471453251 22998956 34983350\n",
      "1857 0.8368413563562576 109919 274387\n",
      "1858 0.6301752501442315 117 260\n",
      "1859 9.383402740006497 6433 137665\n",
      "1860 0.0256557381676087 144262865 169323515\n",
      "1861 0.4868195044547574 34002 68317\n",
      "1862 19.012784135750046 547 6\n",
      "1863 2.2792251234093053 60736636 13421216\n",
      "1864 0.056358183682290064 214495 169167\n",
      "1865 0.011501603780824415 353 317\n",
      "1866 0.052675757840995964 5315708 4225570\n",
      "1867 5.22342809846907 587628 5776641\n",
      "1868 0.09464834994150816 4339227 5902323\n",
      "1869 0.3717832502267543 2170681 3993953\n",
      "1870 0.630061633659009 135347 299349\n",
      "1871 0.922658289054797 78218 29932\n",
      "1872 0.8504767103779245 1776378 706355\n",
      "1873 0.21883149134137794 123807703 197655154\n",
      "1874 4.729893959851085 894900627 101685095\n",
      "1875 0.2806486505249683 135 230\n",
      "1876 0.041756661962418166 302 246\n",
      "1877 0.016449367238500816 14014 12327\n",
      "1878 0.9620260235635408 7 2\n",
      "1879 0.2854990395523176 22435 13148\n",
      "1880 0.007597233386536923 2854725 3114715\n",
      "1881 0.25615693511755333 1477491 890676\n",
      "1882 0.04851496904690619 145156 180923\n",
      "1883 5.2276364219033153e-05 829469 835488\n",
      "1884 32.00142732918206 805 230743\n",
      "1885 4.492876006995799 8541 71138\n",
      "1886 1.0553863233271141 133041 371661\n",
      "1887 14.881112978970494 2293 108624\n",
      "1888 0.0733293115934053 273 208\n",
      "1889 4.555953238623319 513726 4342301\n",
      "1890 0.001164170858444451 12245657 12670688\n",
      "1891 0.010570626935081553 4019780 4455061\n",
      "1892 5.683995252673208 384720 35459\n",
      "1893 2.06011328611534 35347 148494\n",
      "1894 4.637775508532753 335 38\n",
      "1895 0.038780379691047195 8701524 7146129\n",
      "1896 8.770533315021087 127268 6584\n",
      "1897 1.6618312937804096 17354662 62990352\n",
      "1898 0.030091531091723075 2321927 1952144\n",
      "1899 0.10737851881118728 148864 206587\n",
      "1900 0.020940567988883527 253069 218974\n",
      "1901 1.7693333594829983 1092613 4131893\n",
      "1902 0.001601641117846108 2633465 2740995\n",
      "1903 0.9810480418811696 139655 376026\n",
      "1904 0.9620260235635408 23 8\n",
      "1905 0.4804530139182014 1 3\n",
      "1906 2.7816025840048484 664707 3523328\n",
      "1907 0.022176961978686377 2564024 2209262\n",
      "1908 16.941486662020168 12507 203\n",
      "1909 0.4804530139182014 47 23\n",
      "1910 14.927759179339153 23936 1140327\n",
      "1911 2.317782555825454 54 11\n",
      "1912 0.029701659494763055 22556427 26798924\n",
      "1913 0.3275211331569299 909709 513284\n",
      "1914 0.6972040365648028 29131 67142\n",
      "1915 5.502468897125644 156269 1631604\n",
      "1916 0.4804530139182014 18 37\n",
      "1917 0.29371586677228934 1045674 1797897\n",
      "1918 1.1996820426076433 1068696 3195488\n",
      "1919 0.9171432214485847 184 70\n",
      "1920 9.77213302254441 837866 36774\n",
      "1921 0.6203309663958689 93156 204771\n",
      "1922 0.09077941983398836 263655 356359\n",
      "1923 2.435877234289979 121 580\n",
      "1924 18.377628849008488 36879 506\n",
      "1925 0.2253701599116557 709053 1139862\n",
      "1926 0.4415988952232604 160708 312349\n",
      "1927 0.18685049849348115 7565039 11655750\n",
      "1928 8.6935823306814 139014640 7286993\n",
      "1929 1.4464654767163452 17953 59769\n",
      "1930 0.11226466535586448 55505533 77598020\n",
      "1931 0.07573610886046586 484354 637796\n",
      "1932 10.025573312877627 1185 49\n",
      "1933 0.47184120803112134 258247 513282\n",
      "1934 0.011600794876700795 677286 754308\n",
      "1935 0.30195612116421083 254087 440180\n",
      "1936 0.0005546074839056518 1317646 1349045\n",
      "1937 29.518175298599918 512850 2240\n",
      "1938 0.9576474209186144 55 148\n",
      "1939 1.0677237494437246 115 325\n",
      "1940 0.9793634483146276 165903 61668\n",
      "1941 8.651439471946269 30700 581509\n",
      "1942 6.64719429511375 38880 512202\n",
      "1943 4.6780594438239484 47854 5502\n",
      "1944 0.8067610156655973 1583 3888\n",
      "1945 1.1040376267715324 1318391 3770262\n",
      "1946 1.4699080465836987 1255491 4220379\n",
      "1947 4.582644828169615 862661 7337358\n",
      "1948 0.05554506920897512 144854 183352\n",
      "1949 0.03949763539068215 652436 795884\n",
      "1950 2.3177855605901194 1087038 4982266\n",
      "1951 1.6412800393551097 3146 873\n",
      "1952 0.002164724843518185 34497 36140\n",
      "1953 1.3528036519644313 230318 736983\n",
      "1954 0.08838388943314013 274060123 203578815\n",
      "1955 0.10705935005946336 9445 6809\n",
      "1956 5.118385555225201 114214 11889\n",
      "1957 0.09670853590148117 39573 28996\n",
      "1958 0.0012121706197224572 441240 426142\n",
      "1959 1.8463017207777603 1326492 340871\n",
      "1960 7.843256853937188 272 4491\n",
      "1961 0.0008536170341935021 12834881 13215406\n",
      "1962 31.417588527964973 493352 1814\n",
      "1963 0.7892369242863695 1196029 2907802\n",
      "1964 0.042962462750038836 2826412 3477390\n",
      "1965 0.5877986124022774 11176366 24058339\n",
      "1966 3.0275504988676043 3756037 21399133\n",
      "1967 0.006639562203674326 17826 16431\n",
      "1968 1.4926612859736499 300740 88632\n",
      "1969 45.06502034791416 1874143 2276\n",
      "1970 1.4001473295571414 82 270\n",
      "1971 3.1707838728883937 767237 129298\n",
      "1972 0.008323348933689299 2063605 2260728\n",
      "1973 0.009741786401060654 473442 428945\n",
      "1974 3.497639034293427 5114553 33191299\n",
      "1975 2.250715918347053 231 1039\n",
      "1976 1.4667396471626568 46 13\n",
      "1977 14.276998868027325 3 174\n",
      "1978 19.411196114228147 282 23184\n",
      "1979 0.03185077392425235 1355039 1133559\n",
      "1980 2.5722001384780433 7190987 35753085\n",
      "1981 23.796513615040602 153 20233\n",
      "1982 1.0529679072043119 877347 314429\n",
      "1983 1.907203378761459 245578 977143\n",
      "1984 9.949201699102053 130 3069\n",
      "1985 0.002186908083858266 11024438 11552234\n",
      "1986 2.693841669998991 30108 5832\n",
      "1987 0.013854607197608044 2556909 2876300\n",
      "1988 3.7263019951753775 249223 1717654\n",
      "1989 0.23571783918511793 95 155\n",
      "1990 3.8103827476920475 13724874 96662936\n",
      "1991 0.05471584863587564 11185174 14132871\n",
      "1992 0.051597420711353995 62861 78892\n",
      "1993 4.532865046409372 29978 3565\n",
      "1994 0.0635734281285935 196555 252922\n",
      "1995 0.36707166673071356 334 613\n",
      "1996 0.005701450197417859 3264810 3520875\n",
      "1997 1.2977371766159351 635 1986\n",
      "1998 0.3898507881402628 294 157\n",
      "1999 0.08671933354235722 955777 711975\n",
      "2000 1.4287102635654458 707553 214117\n",
      "3.0540805460065594\n"
     ]
    }
   ],
   "source": [
    "\n",
    "# Evaluate the model's predictions against ground-truth cardinalities.\n",
    "# Build a lookup from the labeled test file: key is the concatenation of the\n",
    "# first three '#'-separated fields, value is the integer label.\n",
    "# NOTE(review): raw string concatenation of the key fields can collide\n",
    "# (e.g. '1'+'23' == '12'+'3'); confirm the field formats rule this out,\n",
    "# or join with a separator instead.\n",
    "test_label = {}\n",
    "with open('testlabel.csv', 'r') as csvfile:\n",
    "    Data = csv.reader(csvfile, delimiter = '#')\n",
    "    for query in Data : test_label[query[0] + query[1] + query[2]] = int(query[3])\n",
    "\n",
    "# Re-read the unlabeled test file in its original order and collect the true\n",
    "# label for each query, so Y_real[i] lines up with the i-th prediction.\n",
    "Y_real = []\n",
    "with open('test_without_label.csv', 'r') as csvfile:\n",
    "    Data = csv.reader(csvfile, delimiter = '#')\n",
    "    for query in Data : Y_real.append(test_label[query[0] + query[1] + query[2]])\n",
    "        \n",
    "# Accumulate the squared log-ratio error per query.\n",
    "# NOTE(review): Y_test is defined in an earlier cell (hidden kernel state);\n",
    "# this cell fails under Restart-and-Run-All unless that cell ran first.\n",
    "# The exp() below suggests Y_test holds predictions in log space -- confirm.\n",
    "qerror = 0\n",
    "for i in range(len(Y_real)) :\n",
    "    # Map the log-space prediction back to a count (truncated to int).\n",
    "    y_hat = int(math.exp(Y_test[i]))\n",
    "    y = Y_real[i]\n",
    "    # Squared log of the (y+1)/(y_hat+1) ratio; the +1 guards against log(0).\n",
    "    qerror += (math.log((y+1)/(y_hat+1)))**2\n",
    "    # Per-query report: 1-based index, squared log error, prediction, truth.\n",
    "    print(str(i+1) + \" \" + str((math.log((y+1)/(y_hat+1)))**2) + \" \" + str(y_hat) + \" \" + str(y))\n",
    "# Mean squared log-ratio error over the whole test set.\n",
    "print(qerror / len(Y_real))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.5"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
