{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "ccefc5da",
   "metadata": {},
   "outputs": [],
   "source": [
    "# importing libaries\n",
    "\n",
    "import torch\n",
    "import torch.nn as nn\n",
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "import scipy.io"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "f1e9bd2d",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Define the RNN model\n",
    "class RNN(nn.Module):\n",
    "    def __init__(self, input_size, hidden_size, output_size):\n",
    "        super(RNN, self).__init__()\n",
    "\n",
    "        self.hidden_size = hidden_size\n",
    "\n",
    "        self.rnn = nn.RNN(input_size, hidden_size, batch_first=True)\n",
    "        self.fc = nn.Linear(hidden_size, output_size)\n",
    "\n",
    "    def forward(self, x, hidden):\n",
    "        output, hidden = self.rnn(x, hidden)\n",
    "        output = self.fc(output)\n",
    "        return output, hidden\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "ab942109",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Toy problem data\n",
    "input_size = 256  # number of columns in a dataset\n",
    "hidden_size = 32  # number of neurons\n",
    "output_size = 256  \n",
    "sequence_length = 160  # number of sequences/ number of rows\n",
    "batch_size = 1\n",
    "num_epochs = 20000"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "19450d3a",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "{'__header__': b'MATLAB 5.0 MAT-file Platform: posix, Created on: Thu Jul 20 15:02:48 2023',\n",
       " '__version__': '1.0',\n",
       " '__globals__': [],\n",
       " 'h': array([[0.03949907, 0.03945555, 0.03968762, ..., 0.04081577, 0.04034067,\n",
       "         0.03995187],\n",
       "        [0.03949522, 0.03972834, 0.04007071, ..., 0.0406224 , 0.0405568 ,\n",
       "         0.04021852],\n",
       "        [0.03980837, 0.03966293, 0.04008707, ..., 0.04077249, 0.04050591,\n",
       "         0.04019281],\n",
       "        ...,\n",
       "        [0.04915863, 0.04935256, 0.04961976, ..., 0.04917885, 0.04899574,\n",
       "         0.04881741],\n",
       "        [0.04904458, 0.04916197, 0.04943338, ..., 0.04902097, 0.04897786,\n",
       "         0.04875013],\n",
       "        [0.04913692, 0.04932824, 0.04941899, ..., 0.04888785, 0.0488357 ,\n",
       "         0.04855312]])}"
      ]
     },
     "execution_count": 4,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Load the .mat file\n",
    "v_data = scipy.io.loadmat('v.mat')\n",
    "h_data = scipy.io.loadmat('h.mat')\n",
    "x_data = scipy.io.loadmat('x.mat')\n",
    "h_data"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "94133527",
   "metadata": {},
   "outputs": [],
   "source": [
    "x = x_data['X']\n",
    "u = h_data['h']"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "fde5d937",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "<torch._C.Generator at 0x7fc7f4146790>"
      ]
     },
     "execution_count": 6,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Set random seed for reproducibility\n",
    "torch.manual_seed(42)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "9be3a335",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "test data shape (256,)\n",
      "input data shape (160, 256)\n",
      "Target data shape (160, 256)\n"
     ]
    }
   ],
   "source": [
    "input_data = u[0:160,:]\n",
    "target_data = u[1:161, :]\n",
    "\n",
    "test_data = u[160, :]\n",
    "#test_target = u[161:201, :]\n",
    "\n",
    "print(\"test data shape\", test_data.shape)\n",
    "#print(\"test target shape\", test_target.shape)\n",
    "\n",
    "print(\"input data shape\",input_data.shape)\n",
    "print(\"Target data shape\",target_data.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "a46d6749",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "input tensor shape torch.Size([1, 160, 256])\n",
      "Target tensor shape torch.Size([1, 160, 256])\n"
     ]
    }
   ],
   "source": [
    "# Convert data to tensors\n",
    "input_tensor = torch.tensor(input_data).view(batch_size, sequence_length, input_size).float()\n",
    "target_tensor = torch.tensor(target_data).view(batch_size, sequence_length, output_size).float()\n",
    "\n",
    "print(\"input tensor shape\",input_tensor.shape)\n",
    "print(\"Target tensor shape\",target_tensor.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "id": "7aea6d27",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Convert test data to tensors\n",
    "test_tensor = torch.tensor(test_data).view(batch_size, 1, input_size).float()\n",
    "#test_target_tensor = torch.tensor(test_target).view(batch_size, 40, output_size).float()\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "id": "398fcbe4",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch: 10/20000, Loss: 0.0003271347959526\n",
      "Epoch: 20/20000, Loss: 0.0001071695587598\n",
      "Epoch: 30/20000, Loss: 0.0001001052805805\n",
      "Epoch: 40/20000, Loss: 4.0288696289062500\n",
      "Epoch: 50/20000, Loss: 0.0067166052758694\n",
      "Epoch: 60/20000, Loss: 0.0005897184601054\n",
      "Epoch: 70/20000, Loss: 0.0004613070632331\n",
      "Epoch: 80/20000, Loss: 0.0002590834046714\n",
      "Epoch: 90/20000, Loss: 0.0001046567704179\n",
      "Epoch: 100/20000, Loss: 0.0000695698909112\n",
      "Epoch: 110/20000, Loss: 0.0000625212414889\n",
      "Epoch: 120/20000, Loss: 0.0000595404344494\n",
      "Epoch: 130/20000, Loss: 0.0000581965250603\n",
      "Epoch: 140/20000, Loss: 0.0000564686270081\n",
      "Epoch: 150/20000, Loss: 0.0000562137211091\n",
      "Epoch: 160/20000, Loss: 0.0000562101558899\n",
      "Epoch: 170/20000, Loss: 0.0000562101558899\n",
      "Epoch: 180/20000, Loss: 0.0000562101558899\n",
      "Epoch: 190/20000, Loss: 0.0000562101558899\n",
      "Epoch: 200/20000, Loss: 0.0000562101558899\n",
      "Epoch: 210/20000, Loss: 0.0000562101558899\n",
      "Epoch: 220/20000, Loss: 0.0000562101558899\n",
      "Epoch: 230/20000, Loss: 0.0000562101558899\n",
      "Epoch: 240/20000, Loss: 0.0000562101558899\n",
      "Epoch: 250/20000, Loss: 0.0000562101558899\n",
      "Epoch: 260/20000, Loss: 0.0000562101558899\n",
      "Epoch: 270/20000, Loss: 0.0000562101558899\n",
      "Epoch: 280/20000, Loss: 0.0000562101558899\n",
      "Epoch: 290/20000, Loss: 0.0000562101558899\n",
      "Epoch: 300/20000, Loss: 0.0000562101558899\n",
      "Epoch: 310/20000, Loss: 0.0000562101558899\n",
      "Epoch: 320/20000, Loss: 0.0000562101558899\n",
      "Epoch: 330/20000, Loss: 0.0000562101558899\n",
      "Epoch: 340/20000, Loss: 0.0000562101558899\n",
      "Epoch: 350/20000, Loss: 0.0000562101558899\n",
      "Epoch: 360/20000, Loss: 0.0000562101558899\n",
      "Epoch: 370/20000, Loss: 0.0000562101558899\n",
      "Epoch: 380/20000, Loss: 0.0000562101558899\n",
      "Epoch: 390/20000, Loss: 0.0000562101558899\n",
      "Epoch: 400/20000, Loss: 0.0000562101558899\n",
      "Epoch: 410/20000, Loss: 0.0000562101558899\n",
      "Epoch: 420/20000, Loss: 0.0000562101558899\n",
      "Epoch: 430/20000, Loss: 0.0000562101558899\n",
      "Epoch: 440/20000, Loss: 0.0000562101558899\n",
      "Epoch: 450/20000, Loss: 0.0000562101558899\n",
      "Epoch: 460/20000, Loss: 0.0000562101558899\n",
      "Epoch: 470/20000, Loss: 0.0000562101558899\n",
      "Epoch: 480/20000, Loss: 0.0000562101558899\n",
      "Epoch: 490/20000, Loss: 0.0000562101558899\n",
      "Epoch: 500/20000, Loss: 0.0000562101558899\n",
      "Epoch: 510/20000, Loss: 0.0000562101558899\n",
      "Epoch: 520/20000, Loss: 0.0000562101558899\n",
      "Epoch: 530/20000, Loss: 0.0000562101558899\n",
      "Epoch: 540/20000, Loss: 0.0000562101558899\n",
      "Epoch: 550/20000, Loss: 0.0000562101558899\n",
      "Epoch: 560/20000, Loss: 0.0000562101558899\n",
      "Epoch: 570/20000, Loss: 0.0000562101558899\n",
      "Epoch: 580/20000, Loss: 0.0000562101558899\n",
      "Epoch: 590/20000, Loss: 0.0000562101558899\n",
      "Epoch: 600/20000, Loss: 0.0000562101558899\n",
      "Epoch: 610/20000, Loss: 0.0000562101558899\n",
      "Epoch: 620/20000, Loss: 0.0000562101558899\n",
      "Epoch: 630/20000, Loss: 0.0000562101558899\n",
      "Epoch: 640/20000, Loss: 0.0000562101558899\n",
      "Epoch: 650/20000, Loss: 0.0000562101558899\n",
      "Epoch: 660/20000, Loss: 0.0000562101558899\n",
      "Epoch: 670/20000, Loss: 0.0000562101558899\n",
      "Epoch: 680/20000, Loss: 0.0000562101558899\n",
      "Epoch: 690/20000, Loss: 0.0000562101558899\n",
      "Epoch: 700/20000, Loss: 0.0000562101558899\n",
      "Epoch: 710/20000, Loss: 0.0000562101558899\n",
      "Epoch: 720/20000, Loss: 0.0000562101558899\n",
      "Epoch: 730/20000, Loss: 0.0000562101558899\n",
      "Epoch: 740/20000, Loss: 0.0000562101558899\n",
      "Epoch: 750/20000, Loss: 0.0000562101558899\n",
      "Epoch: 760/20000, Loss: 0.0000562101558899\n",
      "Epoch: 770/20000, Loss: 0.0000562101558899\n",
      "Epoch: 780/20000, Loss: 0.0000562101558899\n",
      "Epoch: 790/20000, Loss: 0.0000562101558899\n",
      "Epoch: 800/20000, Loss: 0.0000562101558899\n",
      "Epoch: 810/20000, Loss: 0.0000562101558899\n",
      "Epoch: 820/20000, Loss: 0.0000562101558899\n",
      "Epoch: 830/20000, Loss: 0.0000562101558899\n",
      "Epoch: 840/20000, Loss: 0.0000562101558899\n",
      "Epoch: 850/20000, Loss: 0.0000562101558899\n",
      "Epoch: 860/20000, Loss: 0.0000562101558899\n",
      "Epoch: 870/20000, Loss: 0.0000562101558899\n",
      "Epoch: 880/20000, Loss: 0.0000562101558899\n",
      "Epoch: 890/20000, Loss: 0.0000562101558899\n",
      "Epoch: 900/20000, Loss: 0.0000562101558899\n",
      "Epoch: 910/20000, Loss: 0.0000562101558899\n",
      "Epoch: 920/20000, Loss: 0.0000562101558899\n",
      "Epoch: 930/20000, Loss: 0.0000562101558899\n",
      "Epoch: 940/20000, Loss: 0.0000562101558899\n",
      "Epoch: 950/20000, Loss: 0.0000562101558899\n",
      "Epoch: 960/20000, Loss: 0.0000562101558899\n",
      "Epoch: 970/20000, Loss: 0.0000562101558899\n",
      "Epoch: 980/20000, Loss: 0.0000562101558899\n",
      "Epoch: 990/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1000/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1010/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1020/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1030/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1040/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1050/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1060/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1070/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1080/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1090/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1100/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1110/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1120/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1130/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1140/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1150/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1160/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1170/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1180/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1190/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1200/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1210/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1220/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1230/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1240/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1250/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1260/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1270/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1280/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1290/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1300/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1310/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1320/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1330/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1340/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1350/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1360/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1370/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1380/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1390/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1400/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1410/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1420/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1430/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1440/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1450/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1460/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1470/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1480/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1490/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1500/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1510/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1520/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1530/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1540/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1550/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1560/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1570/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1580/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1590/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1600/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1610/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1620/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1630/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1640/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1650/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1660/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1670/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1680/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1690/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1700/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1710/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1720/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1730/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1740/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1750/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1760/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1770/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1780/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1790/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1800/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1810/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1820/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1830/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1840/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1850/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1860/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1870/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1880/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1890/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1900/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1910/20000, Loss: 0.0000562101558899\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch: 1920/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1930/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1940/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1950/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1960/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1970/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1980/20000, Loss: 0.0000562101558899\n",
      "Epoch: 1990/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2000/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2010/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2020/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2030/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2040/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2050/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2060/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2070/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2080/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2090/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2100/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2110/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2120/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2130/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2140/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2150/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2160/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2170/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2180/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2190/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2200/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2210/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2220/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2230/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2240/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2250/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2260/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2270/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2280/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2290/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2300/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2310/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2320/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2330/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2340/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2350/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2360/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2370/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2380/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2390/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2400/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2410/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2420/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2430/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2440/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2450/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2460/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2470/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2480/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2490/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2500/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2510/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2520/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2530/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2540/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2550/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2560/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2570/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2580/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2590/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2600/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2610/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2620/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2630/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2640/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2650/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2660/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2670/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2680/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2690/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2700/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2710/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2720/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2730/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2740/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2750/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2760/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2770/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2780/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2790/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2800/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2810/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2820/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2830/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2840/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2850/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2860/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2870/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2880/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2890/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2900/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2910/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2920/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2930/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2940/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2950/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2960/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2970/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2980/20000, Loss: 0.0000562101558899\n",
      "Epoch: 2990/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3000/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3010/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3020/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3030/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3040/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3050/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3060/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3070/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3080/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3090/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3100/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3110/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3120/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3130/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3140/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3150/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3160/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3170/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3180/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3190/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3200/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3210/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3220/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3230/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3240/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3250/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3260/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3270/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3280/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3290/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3300/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3310/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3320/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3330/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3340/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3350/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3360/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3370/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3380/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3390/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3400/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3410/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3420/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3430/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3440/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3450/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3460/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3470/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3480/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3490/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3500/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3510/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3520/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3530/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3540/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3550/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3560/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3570/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3580/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3590/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3600/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3610/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3620/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3630/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3640/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3650/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3660/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3670/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3680/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3690/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3700/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3710/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3720/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3730/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3740/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3750/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3760/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3770/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3780/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3790/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3800/20000, Loss: 0.0000562101558899\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch: 3810/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3820/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3830/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3840/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3850/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3860/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3870/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3880/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3890/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3900/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3910/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3920/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3930/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3940/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3950/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3960/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3970/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3980/20000, Loss: 0.0000562101558899\n",
      "Epoch: 3990/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4000/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4010/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4020/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4030/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4040/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4050/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4060/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4070/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4080/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4090/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4100/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4110/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4120/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4130/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4140/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4150/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4160/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4170/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4180/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4190/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4200/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4210/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4220/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4230/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4240/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4250/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4260/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4270/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4280/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4290/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4300/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4310/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4320/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4330/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4340/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4350/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4360/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4370/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4380/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4390/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4400/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4410/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4420/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4430/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4440/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4450/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4460/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4470/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4480/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4490/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4500/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4510/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4520/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4530/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4540/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4550/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4560/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4570/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4580/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4590/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4600/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4610/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4620/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4630/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4640/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4650/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4660/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4670/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4680/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4690/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4700/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4710/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4720/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4730/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4740/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4750/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4760/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4770/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4780/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4790/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4800/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4810/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4820/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4830/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4840/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4850/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4860/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4870/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4880/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4890/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4900/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4910/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4920/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4930/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4940/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4950/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4960/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4970/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4980/20000, Loss: 0.0000562101558899\n",
      "Epoch: 4990/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5000/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5010/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5020/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5030/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5040/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5050/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5060/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5070/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5080/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5090/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5100/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5110/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5120/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5130/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5140/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5150/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5160/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5170/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5180/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5190/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5200/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5210/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5220/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5230/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5240/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5250/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5260/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5270/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5280/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5290/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5300/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5310/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5320/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5330/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5340/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5350/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5360/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5370/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5380/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5390/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5400/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5410/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5420/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5430/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5440/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5450/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5460/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5470/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5480/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5490/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5500/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5510/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5520/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5530/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5540/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5550/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5560/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5570/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5580/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5590/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5600/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5610/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5620/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5630/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5640/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5650/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5660/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5670/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5680/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5690/20000, Loss: 0.0000562101558899\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch: 5700/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5710/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5720/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5730/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5740/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5750/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5760/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5770/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5780/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5790/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5800/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5810/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5820/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5830/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5840/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5850/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5860/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5870/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5880/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5890/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5900/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5910/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5920/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5930/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5940/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5950/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5960/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5970/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5980/20000, Loss: 0.0000562101558899\n",
      "Epoch: 5990/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6000/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6010/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6020/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6030/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6040/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6050/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6060/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6070/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6080/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6090/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6100/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6110/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6120/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6130/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6140/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6150/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6160/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6170/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6180/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6190/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6200/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6210/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6220/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6230/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6240/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6250/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6260/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6270/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6280/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6290/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6300/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6310/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6320/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6330/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6340/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6350/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6360/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6370/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6380/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6390/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6400/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6410/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6420/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6430/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6440/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6450/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6460/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6470/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6480/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6490/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6500/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6510/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6520/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6530/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6540/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6550/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6560/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6570/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6580/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6590/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6600/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6610/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6620/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6630/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6640/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6650/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6660/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6670/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6680/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6690/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6700/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6710/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6720/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6730/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6740/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6750/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6760/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6770/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6780/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6790/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6800/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6810/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6820/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6830/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6840/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6850/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6860/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6870/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6880/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6890/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6900/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6910/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6920/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6930/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6940/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6950/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6960/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6970/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6980/20000, Loss: 0.0000562101558899\n",
      "Epoch: 6990/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7000/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7010/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7020/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7030/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7040/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7050/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7060/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7070/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7080/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7090/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7100/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7110/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7120/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7130/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7140/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7150/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7160/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7170/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7180/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7190/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7200/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7210/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7220/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7230/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7240/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7250/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7260/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7270/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7280/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7290/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7300/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7310/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7320/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7330/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7340/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7350/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7360/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7370/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7380/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7390/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7400/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7410/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7420/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7430/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7440/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7450/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7460/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7470/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7480/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7490/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7500/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7510/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7520/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7530/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7540/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7550/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7560/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7570/20000, Loss: 0.0000562101558899\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch: 7580/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7590/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7600/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7610/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7620/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7630/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7640/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7650/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7660/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7670/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7680/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7690/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7700/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7710/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7720/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7730/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7740/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7750/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7760/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7770/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7780/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7790/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7800/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7810/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7820/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7830/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7840/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7850/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7860/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7870/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7880/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7890/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7900/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7910/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7920/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7930/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7940/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7950/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7960/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7970/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7980/20000, Loss: 0.0000562101558899\n",
      "Epoch: 7990/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8000/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8010/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8020/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8030/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8040/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8050/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8060/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8070/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8080/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8090/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8100/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8110/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8120/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8130/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8140/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8150/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8160/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8170/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8180/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8190/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8200/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8210/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8220/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8230/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8240/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8250/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8260/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8270/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8280/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8290/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8300/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8310/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8320/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8330/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8340/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8350/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8360/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8370/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8380/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8390/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8400/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8410/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8420/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8430/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8440/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8450/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8460/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8470/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8480/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8490/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8500/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8510/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8520/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8530/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8540/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8550/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8560/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8570/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8580/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8590/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8600/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8610/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8620/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8630/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8640/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8650/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8660/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8670/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8680/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8690/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8700/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8710/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8720/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8730/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8740/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8750/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8760/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8770/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8780/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8790/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8800/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8810/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8820/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8830/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8840/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8850/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8860/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8870/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8880/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8890/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8900/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8910/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8920/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8930/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8940/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8950/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8960/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8970/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8980/20000, Loss: 0.0000562101558899\n",
      "Epoch: 8990/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9000/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9010/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9020/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9030/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9040/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9050/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9060/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9070/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9080/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9090/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9100/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9110/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9120/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9130/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9140/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9150/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9160/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9170/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9180/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9190/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9200/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9210/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9220/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9230/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9240/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9250/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9260/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9270/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9280/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9290/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9300/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9310/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9320/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9330/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9340/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9350/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9360/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9370/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9380/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9390/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9400/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9410/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9420/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9430/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9440/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9450/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9460/20000, Loss: 0.0000562101558899\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch: 9470/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9480/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9490/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9500/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9510/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9520/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9530/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9540/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9550/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9560/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9570/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9580/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9590/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9600/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9610/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9620/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9630/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9640/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9650/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9660/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9670/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9680/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9690/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9700/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9710/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9720/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9730/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9740/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9750/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9760/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9770/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9780/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9790/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9800/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9810/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9820/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9830/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9840/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9850/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9860/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9870/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9880/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9890/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9900/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9910/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9920/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9930/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9940/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9950/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9960/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9970/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9980/20000, Loss: 0.0000562101558899\n",
      "Epoch: 9990/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10000/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10010/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10020/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10030/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10040/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10050/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10060/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10070/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10080/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10090/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10100/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10110/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10120/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10130/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10140/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10150/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10160/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10170/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10180/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10190/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10200/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10210/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10220/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10230/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10240/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10250/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10260/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10270/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10280/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10290/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10300/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10310/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10320/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10330/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10340/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10350/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10360/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10370/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10380/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10390/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10400/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10410/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10420/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10430/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10440/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10450/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10460/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10470/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10480/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10490/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10500/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10510/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10520/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10530/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10540/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10550/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10560/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10570/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10580/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10590/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10600/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10610/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10620/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10630/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10640/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10650/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10660/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10670/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10680/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10690/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10700/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10710/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10720/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10730/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10740/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10750/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10760/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10770/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10780/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10790/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10800/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10810/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10820/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10830/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10840/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10850/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10860/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10870/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10880/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10890/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10900/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10910/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10920/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10930/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10940/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10950/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10960/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10970/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10980/20000, Loss: 0.0000562101558899\n",
      "Epoch: 10990/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11000/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11010/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11020/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11030/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11040/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11050/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11060/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11070/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11080/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11090/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11100/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11110/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11120/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11130/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11140/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11150/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11160/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11170/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11180/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11190/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11200/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11210/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11220/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11230/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11240/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11250/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11260/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11270/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11280/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11290/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11300/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11310/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11320/20000, Loss: 0.0000562101558899\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch: 11330/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11340/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11350/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11360/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11370/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11380/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11390/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11400/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11410/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11420/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11430/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11440/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11450/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11460/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11470/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11480/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11490/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11500/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11510/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11520/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11530/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11540/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11550/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11560/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11570/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11580/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11590/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11600/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11610/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11620/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11630/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11640/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11650/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11660/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11670/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11680/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11690/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11700/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11710/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11720/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11730/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11740/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11750/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11760/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11770/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11780/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11790/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11800/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11810/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11820/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11830/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11840/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11850/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11860/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11870/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11880/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11890/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11900/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11910/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11920/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11930/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11940/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11950/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11960/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11970/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11980/20000, Loss: 0.0000562101558899\n",
      "Epoch: 11990/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12000/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12010/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12020/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12030/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12040/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12050/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12060/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12070/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12080/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12090/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12100/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12110/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12120/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12130/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12140/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12150/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12160/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12170/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12180/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12190/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12200/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12210/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12220/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12230/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12240/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12250/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12260/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12270/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12280/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12290/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12300/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12310/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12320/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12330/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12340/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12350/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12360/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12370/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12380/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12390/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12400/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12410/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12420/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12430/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12440/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12450/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12460/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12470/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12480/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12490/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12500/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12510/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12520/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12530/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12540/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12550/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12560/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12570/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12580/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12590/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12600/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12610/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12620/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12630/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12640/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12650/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12660/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12670/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12680/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12690/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12700/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12710/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12720/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12730/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12740/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12750/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12760/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12770/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12780/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12790/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12800/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12810/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12820/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12830/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12840/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12850/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12860/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12870/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12880/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12890/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12900/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12910/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12920/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12930/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12940/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12950/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12960/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12970/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12980/20000, Loss: 0.0000562101558899\n",
      "Epoch: 12990/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13000/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13010/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13020/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13030/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13040/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13050/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13060/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13070/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13080/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13090/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13100/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13110/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13120/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13130/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13140/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13150/20000, Loss: 0.0000562101558899\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch: 13160/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13170/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13180/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13190/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13200/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13210/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13220/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13230/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13240/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13250/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13260/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13270/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13280/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13290/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13300/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13310/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13320/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13330/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13340/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13350/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13360/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13370/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13380/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13390/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13400/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13410/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13420/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13430/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13440/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13450/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13460/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13470/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13480/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13490/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13500/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13510/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13520/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13530/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13540/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13550/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13560/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13570/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13580/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13590/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13600/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13610/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13620/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13630/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13640/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13650/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13660/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13670/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13680/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13690/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13700/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13710/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13720/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13730/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13740/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13750/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13760/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13770/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13780/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13790/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13800/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13810/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13820/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13830/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13840/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13850/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13860/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13870/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13880/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13890/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13900/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13910/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13920/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13930/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13940/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13950/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13960/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13970/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13980/20000, Loss: 0.0000562101558899\n",
      "Epoch: 13990/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14000/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14010/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14020/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14030/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14040/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14050/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14060/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14070/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14080/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14090/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14100/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14110/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14120/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14130/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14140/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14150/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14160/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14170/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14180/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14190/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14200/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14210/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14220/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14230/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14240/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14250/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14260/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14270/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14280/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14290/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14300/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14310/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14320/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14330/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14340/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14350/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14360/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14370/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14380/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14390/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14400/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14410/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14420/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14430/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14440/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14450/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14460/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14470/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14480/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14490/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14500/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14510/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14520/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14530/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14540/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14550/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14560/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14570/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14580/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14590/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14600/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14610/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14620/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14630/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14640/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14650/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14660/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14670/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14680/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14690/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14700/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14710/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14720/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14730/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14740/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14750/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14760/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14770/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14780/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14790/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14800/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14810/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14820/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14830/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14840/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14850/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14860/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14870/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14880/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14890/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14900/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14910/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14920/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14930/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14940/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14950/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14960/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14970/20000, Loss: 0.0000562101558899\n",
      "Epoch: 14980/20000, Loss: 0.0000562101558899\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch: 14990/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15000/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15010/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15020/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15030/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15040/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15050/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15060/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15070/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15080/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15090/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15100/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15110/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15120/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15130/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15140/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15150/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15160/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15170/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15180/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15190/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15200/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15210/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15220/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15230/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15240/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15250/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15260/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15270/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15280/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15290/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15300/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15310/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15320/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15330/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15340/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15350/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15360/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15370/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15380/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15390/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15400/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15410/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15420/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15430/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15440/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15450/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15460/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15470/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15480/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15490/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15500/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15510/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15520/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15530/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15540/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15550/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15560/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15570/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15580/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15590/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15600/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15610/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15620/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15630/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15640/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15650/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15660/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15670/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15680/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15690/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15700/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15710/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15720/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15730/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15740/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15750/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15760/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15770/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15780/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15790/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15800/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15810/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15820/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15830/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15840/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15850/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15860/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15870/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15880/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15890/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15900/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15910/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15920/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15930/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15940/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15950/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15960/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15970/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15980/20000, Loss: 0.0000562101558899\n",
      "Epoch: 15990/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16000/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16010/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16020/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16030/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16040/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16050/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16060/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16070/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16080/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16090/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16100/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16110/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16120/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16130/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16140/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16150/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16160/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16170/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16180/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16190/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16200/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16210/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16220/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16230/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16240/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16250/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16260/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16270/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16280/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16290/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16300/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16310/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16320/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16330/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16340/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16350/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16360/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16370/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16380/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16390/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16400/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16410/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16420/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16430/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16440/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16450/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16460/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16470/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16480/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16490/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16500/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16510/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16520/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16530/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16540/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16550/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16560/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16570/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16580/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16590/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16600/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16610/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16620/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16630/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16640/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16650/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16660/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16670/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16680/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16690/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16700/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16710/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16720/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16730/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16740/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16750/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16760/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16770/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16780/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16790/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16800/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16810/20000, Loss: 0.0000562101558899\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch: 16820/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16830/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16840/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16850/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16860/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16870/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16880/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16890/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16900/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16910/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16920/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16930/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16940/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16950/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16960/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16970/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16980/20000, Loss: 0.0000562101558899\n",
      "Epoch: 16990/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17000/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17010/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17020/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17030/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17040/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17050/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17060/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17070/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17080/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17090/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17100/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17110/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17120/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17130/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17140/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17150/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17160/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17170/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17180/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17190/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17200/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17210/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17220/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17230/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17240/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17250/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17260/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17270/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17280/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17290/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17300/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17310/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17320/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17330/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17340/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17350/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17360/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17370/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17380/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17390/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17400/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17410/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17420/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17430/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17440/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17450/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17460/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17470/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17480/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17490/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17500/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17510/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17520/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17530/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17540/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17550/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17560/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17570/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17580/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17590/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17600/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17610/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17620/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17630/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17640/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17650/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17660/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17670/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17680/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17690/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17700/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17710/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17720/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17730/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17740/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17750/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17760/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17770/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17780/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17790/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17800/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17810/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17820/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17830/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17840/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17850/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17860/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17870/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17880/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17890/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17900/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17910/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17920/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17930/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17940/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17950/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17960/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17970/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17980/20000, Loss: 0.0000562101558899\n",
      "Epoch: 17990/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18000/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18010/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18020/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18030/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18040/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18050/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18060/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18070/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18080/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18090/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18100/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18110/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18120/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18130/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18140/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18150/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18160/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18170/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18180/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18190/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18200/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18210/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18220/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18230/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18240/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18250/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18260/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18270/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18280/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18290/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18300/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18310/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18320/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18330/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18340/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18350/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18360/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18370/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18380/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18390/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18400/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18410/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18420/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18430/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18440/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18450/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18460/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18470/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18480/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18490/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18500/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18510/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18520/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18530/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18540/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18550/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18560/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18570/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18580/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18590/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18600/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18610/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18620/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18630/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18640/20000, Loss: 0.0000562101558899\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch: 18650/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18660/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18670/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18680/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18690/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18700/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18710/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18720/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18730/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18740/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18750/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18760/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18770/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18780/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18790/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18800/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18810/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18820/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18830/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18840/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18850/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18860/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18870/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18880/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18890/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18900/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18910/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18920/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18930/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18940/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18950/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18960/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18970/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18980/20000, Loss: 0.0000562101558899\n",
      "Epoch: 18990/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19000/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19010/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19020/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19030/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19040/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19050/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19060/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19070/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19080/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19090/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19100/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19110/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19120/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19130/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19140/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19150/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19160/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19170/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19180/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19190/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19200/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19210/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19220/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19230/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19240/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19250/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19260/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19270/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19280/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19290/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19300/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19310/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19320/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19330/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19340/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19350/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19360/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19370/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19380/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19390/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19400/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19410/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19420/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19430/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19440/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19450/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19460/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19470/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19480/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19490/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19500/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19510/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19520/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19530/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19540/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19550/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19560/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19570/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19580/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19590/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19600/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19610/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19620/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19630/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19640/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19650/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19660/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19670/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19680/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19690/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19700/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19710/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19720/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19730/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19740/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19750/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19760/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19770/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19780/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19790/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19800/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19810/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19820/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19830/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19840/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19850/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19860/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19870/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19880/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19890/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19900/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19910/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19920/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19930/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19940/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19950/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19960/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19970/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19980/20000, Loss: 0.0000562101558899\n",
      "Epoch: 19990/20000, Loss: 0.0000562101558899\n",
      "Epoch: 20000/20000, Loss: 0.0000562101558899\n"
     ]
    }
   ],
   "source": [
    "# # dt=0.25, 0.15\n",
    "rnn = RNN(input_size, hidden_size, output_size)\n",
    "\n",
    "# Loss and optimizer\n",
    "criterion = nn.MSELoss()\n",
    "optimizer = torch.optim.LBFGS(rnn.parameters(), lr=0.1)\n",
    "\n",
    "# # Loss and optimizer\n",
    "# criterion = nn.MSELoss()\n",
    "# optimizer = torch.optim.Adam(lem.parameters(), lr=0.001)\n",
    "\n",
    "# Training loop\n",
    "for epoch in range(num_epochs):\n",
    "    def closure():\n",
    "        optimizer.zero_grad()\n",
    "        hidden = torch.zeros(1, batch_size, hidden_size)\n",
    "\n",
    "        # Forward pass\n",
    "        output, hidden = rnn(input_tensor, hidden)\n",
    "        loss = criterion(output, target_tensor)\n",
    "\n",
    "        loss.backward()\n",
    "        return loss\n",
    "\n",
    "    optimizer.step(closure)\n",
    "\n",
    "    # Print progress\n",
    "    if (epoch + 1) % 10 == 0:\n",
    "        print(f'Epoch: {epoch + 1}/{num_epochs}, Loss: {closure().item():.16f}')\n",
    "\n",
    "   "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "id": "e30cfc4a",
   "metadata": {},
   "outputs": [],
   "source": [
    "# # Create RNN instance\n",
    "# rnn = RNN(input_size, hidden_size, output_size)\n",
    "\n",
    "# # Loss and optimizer\n",
    "# criterion = nn.MSELoss()\n",
    "# optimizer = torch.optim.Adam(rnn.parameters(), lr=0.01)\n",
    "\n",
    "# # Training loop\n",
    "# for epoch in range(num_epochs):\n",
    "#     # Set initial hidden state\n",
    "#     hidden = torch.zeros(1, batch_size, hidden_size)\n",
    "\n",
    "#     # Forward pass\n",
    "#     output, hidden = rnn(input_tensor, hidden)\n",
    "#     loss = criterion(output, target_tensor)\n",
    "\n",
    "#     # Backward and optimize\n",
    "#     optimizer.zero_grad()\n",
    "#     loss.backward()\n",
    "#     optimizer.step()\n",
    "\n",
    "#     # Print progress\n",
    "#     if (epoch+1) % 10 == 0:\n",
    "#         print(f'Epoch: {epoch+1}/{num_epochs}, Loss: {loss.item():.4f}')\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "id": "a330d1ca",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(1, 256)\n",
      "(1, 201)\n",
      "(201, 256)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/data/localhome/tkapoor/.local/lib/python3.8/site-packages/numpy/ma/core.py:2820: ComplexWarning: Casting complex values to real discards the imaginary part\n",
      "  _data = np.array(data, dtype=dtype, copy=copy,\n",
      "/usr/local/lib/python3.8/dist-packages/matplotlib/contour.py:1180: ComplexWarning: Casting complex values to real discards the imaginary part\n",
      "  self.levels = np.asarray(levels_arg).astype(np.float64)\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "<Figure size 432x288 with 0 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAzMAAAFNCAYAAAA0HDEuAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAABj9ElEQVR4nO29ebwsV1mv/7zd+5wTMSEgiUwJBGVQHNGAKHIZgswSFUFQkCARUVAvFxliuMAPr9coXgUvKB4RA4oMIkiUKASFC8pgoqJMgpExIRpCgICQ5Oze7++PqupdXbuGVVNXVff3+Xz2Z3fNq6uqV62n3jWYuyOEEEIIIYQQU2M2dAKEEEIIIYQQogmSGSGEEEIIIcQkkcwIIYQQQgghJolkRgghhBBCCDFJJDNCCCGEEEKISSKZEUIIIYQQQkwSyYwQQsSY2cfN7D4DHfssM/vb1PSXzOzrhkjLWDGzHzOzNw+dDiGEEONBMiOE2EjM7HvN7J1m9gUzu9rM/s7M7jx0ukJx9+Pd/aNDpyOWrEUsV+m/W/R83NPMzM1sJ5nn7q9w9/v2eVwhhBDTYqd6FSGEmBZmdkPgL4CfBl4DHAbuDlzX4TF23H23q/2NgZLv9C53/961J0gIIYSoQJEZIcQmcnsAd3+luy/c/Svu/mZ3/xcAM/tJM/uQmX3RzD5oZt+R2vbbzexf4ojOq83suHibe5rZZWb2dDP7D+APzOyImT3fzD4d/z3fzI5k1n+KmV1pZleY2WOTg5jZTczsAjO7xsz+Hvj69BeIoxK3jT+fb2YvMrM3xml+j5l9fWrd+5rZh+M0/7aZ/T8zOzu1/Cfi7/s5M3uTmd06c5wnmtm/Af9W90Sb2Z3M7B/jdL3azF5lZv8rXrZSdS7nez3IzP4pPgefMrPnpFZ9e/z/83Ek6LtzquJ9j5ldHH/vi83se1LL3mZmvxRH5L5oZm82s5Pqfj8hhBDjRjIjhNhEPgIszOxlZvYAM7txssDMHgY8B/hx4IbAQ4DPprZ9OHB/4DbAtwJnpZbdDPga4NbA44FzgbsC3w58G3AX4JmZ9U8Ebgk8DnhRKi0vAq4Fbg78RPxXxiOA/w+4MXAp8Mvx9zkJeC1wDnAT4MNAulB/JvCLwA8BJwPvAF6Z2fcPAN8F3LEiDSuY2WHgz4A/JDovfwI8tMYu/ovoOtwIeBDw02b2A/Gy/xb/v1Fc5e5dmWN/DfBG4LeIvvdvAG80s5ukVvtR4LHA1xJF536hRtqEEEJMAMmMEGLjcPdrgO8FHPg94DNxFOSmwNnAr7n7xR5xqbt/IrX5b7n7p939auDPiUQlYQ94trtf5+5fAX4MeK67X+nunyGSjUen1j8WLz/m7hcCXwLuYGZzokL/s9z9v9z9/cDLKr7W69397+NqYK9IpeuBwAfc/XXxst8C/iO13ROAX3H3D8XL/zdR9OnWqXV+xd2vjr9THnc1s8+n/v49mQ8cAp4ff8fXAhdXfI8l7v42d3+fu+/FUbNXAvcI3PxBwL+5+x+6+667vxL4V+D7U+v8gbt/JP5er2H1WgohhNgAJDNCiI0kLryf5e6nAN8M3AJ4PnAq8O8lm6ZF4MvA8anpz7j7tanpWwBpEfpEPC/hs5k2KMn+TiZqs/ipzLZlFKXrFun9uLsDl6XWvTXwgkREgKsBI4oWJXwKwMzunmrg/4HU8ne7+41Sf0kVt1sAl8fHDP0eS8zsu8zsrWb2GTP7ApF4hVYFy5775Njp71V2LYUQQmwAkhkhxMbj7v8KnE8kNZ8i0z6lzq4y058mkoWEW8XzqvgMsEskVultm3AFcEoyYWaWnib6vj+VkZGvcvd3ptZxAHd/R1yl63h3/6bAY98yPmbe9/gv4AaptN0ss/0fAxcAp7r7icCLiURrmaYSsuc+OfblAekWQgixIUhmhBAbh5l9Q9zw/pR4+lTgkcC7gZcAv2Bm32kRt81UuarDK4FnmtnJcduVZwF/VLWRuy+A1wHPMbMbmNkdgcc0TMMbgW8xsx+wqBvjJxK11Ul4MXCOmX0TgJmdGLcb6oJ3EUnZz5nZITP7IaJ2Qwn/DHyTmX27RR0pPCez/QnA1e5+rZndhaiNS8JniKr1FY21cyFwezP7UTPbMbMfIWrz8xetv5UQQojJIJkRQmwiXyRq0P4eM/svIol5P/AUd/8Tosbzfxyv92dEjdeb8L+AS4B/Ad4H/GM8L4QnEVV7+g+iqNEfNEmAu18FPAz4NaKODO4Yp+m6ePnrgV8FXmVm1xCdhwfUPMx328FxZu7s7tcTdSxwFlH1tR8hkrQkbR8Bngu8haintL/N7PdngOea2ReJRPA1qW2/THSd/i6uInfXzPf+LPBg4Cnx934a8OD4fAghhNgSbLWqsxBCiCljZjOiNjM/5u5vHeD45wOXufszq9YVQggh2qLIjBBCTBwzu5+Z3ciiMW5+kajdybsHTpYQQgjRO5IZIYSYPt9N1EPbVURdE/9ASTfLQgghxMagamZCCCGEEEKISaLIjBBCCCGEEGKSSGaEEEIIIYQQk2Rn6ATU4aQTT/Bb3ezkoZNRA1XhE0IIMXWsehUhNpB/+sjHrnL3URc8v3P+1X6NL2pvd6lf9yZ3v38PSVo7k5KZW93sZP7ud3956GQcZG936BQIIYQQ/TKbVJFBiNbc4F4/+omh01DFNb7gBcedVnu7B33lwyd1n5phUM5UhURFCCGEqPc8lPgIIdbExHIbz89M62SakhMhhBCiX7p41kqIhKjEZsb8q+b1N9ygzvs3I6eQoAghhBCbRdmzXaIjhIhRbiCEEEKIaaEqb0IAYHPj0AkNIjNXd5+WodAvXAghhBCbS5X4SHaEmDT6BQshhBBie2lSVT1PgJpWec/uK70fiZYQlehXIoQQQghRhy7b6pbtq+s2wZKjjcNmsHN8g2pmG4TuaiGEEEKIbaCrKJQQI0J3qBBCCCGEyCdEgCQ8YkB09wkhhBBCiOa0HQNQNMeM+aHZ0KkYFN1pQgghhBCiW+pWaZP8iIZst8oJIYQQQojh2dtt/ic6x8xeamZXmtn7C5afaGZ/bmb/bGYfMLPHxvO/3czeFc/7FzP7kdQ2Z5jZP5rZe83sb83stl2kVRoshBBCCCGmyxYLjc1g57heivPnAy8EXl6w/InAB939+83sZODDZvYK4MvAj7v7v5nZLYB/MLM3ufvngd8BznT3D5nZzwDPBM5qm1DJjBBCCCGEEGKJu7/dzE4rWwU4wcwMOB64Gth194+k9vFpM7sSOBn4fLzNDePFJwKf7iKtkhkhhBBCCCEmiJkxP9yo1chJZnZJavqoux+tsf0LgQuIhOQE4EfcfS+TtrsAh4F/j2edDVxoZl8BrgHu2iThWSQzQgghhBBCbBdXufvpLba/H/Be4N7A1wMXmdk73P0aADO7OfCHwGNSkvNk4IHu/h4zeyrwG0SC0wp1ACCEEEIIIYSow2OB13nEpcDHgG8AMLMbAm8EznX3d8fzTga+zd3fE2//auB7ukiIIjNCCCGEEEJMETN2jgxSnP8kcAbwDjO7KXAH4KNmdhh4PfByd39tav3PASea2e3jdjXfB3yoi4RIZoQQQgghhBBLzOyVwD2J2tZcBjwbOATg7i8Gfgk438zeBxjwdHe/ysweBfw34CZmdla8u7Pc/b1m9pPAn5rZHpHc/EQXaZXMCCGEEEIIIZa4+yMrln8auG/O/D8C/qhgm9cTRW06ZXCZMbM5cAlwubs/eOj0CCGEEEIIMQXMYLYzHzoZgzKGDgB+no7qzAkhhBBCCCG2h0FlxsxOAR4EvGTIdAghhBBCCCGmx9DVzJ4PPI1osB0hhBBCCCFEIDYzdo6omtkgmNmDgSvd/R8q1nu8mV1iZpdc9fkvril1QgghhBBCiLEzZGTmbsBDzOyBwHHADc3sj9z9UemV3P0ocBTgO+5wG19/MoUQQgghhBghZswPD13RalgGi8y4+znufoq7nwY8AvibrMgIIYQQQgghRBFj6M1MCCGEEEIIIWoziriUu78NeNvAyRBCCCGEEGIyaJwZRWaEEEIIIYQQE2UUkZlQzB3b3a29ne/sf82i7dPrlK0Xsm2XNPm+Q9DnORBCCCGEECKPrSiBhghBG2mYinD0ybafA8mcEEIIIdaOGTtHtrsMst3fXoiO2GaZk8gJIYQQYihUChFCtGKbRS4EyZ4QQgjRH3rKCiFEj2yD7EnYhBBiGKLezLa7Py89gYQQQrRi04VNsiaEEONFObQQQghRwqbLWhqJmxATw2bMDx8aOhWDolxLCCGEEMDw4iaZEkLURbmGEEIIIUbB0DLVF5I0MTXM7KXAg4Er3f2bc5b/GPB0wIAvAj/t7v8cL7s/8AJgDrzE3c+L598b+HXgMPAPwOPcvfWPXr8uIYQQQogeGZukSa42BzOYHerlep4PvBB4ecHyjwH3cPfPmdkDgKPAd5nZHHgR8H3AZcDFZnYB8K/Ay4Az3P0jZvZc4DHA77dNqO5mIYQQQogtYki5kkhNA3d/u5mdVrL8nanJdwOnxJ/vAlzq7h8FMLNXAWcCnwGud/ePxOtdBJyDZEYIIYQQQkyFdYqUxGltPA74y/jzLYFPpZZdBnwXcBWwY2anu/slwA8Dp3ZxcF1lIYQQQgixcYytel8vmDHbmTfZ8iQzuyQ1fdTdj9Y/vN2LSGa+t2w9d3czewTwm2Z2BHgzsKh7vDwkM0IIIYQQQmwXV7n76W12YGbfCrwEeIC7fzaefTmrEZdT4nm4+7uAu8fb3he4fZvjJ0xPZvY6kbjtYdbI1oUQQgghhMjFzG4FvA54dKodDMDFwO3M7DZEEvMI4Efjbb7W3a+MIzNPB365i7RMT2ZEPSR/Yl1InIUQQoi1YmbMDndfnDezVwL3JKqOdhnwbOAQgLu/GHgWcBPgt80MYNfdT3f3XTN7EvAmoq6ZX+ruH4h3+1QzezAwA37H3f+mi7RKZoQQ3SBxjpDUCSGEmDju/siK5WcDZxcsuxC4MGf+U4GndpLAFJIZIYTokm2WOomcEEKsF4PZfLvzXsmMEEKIbtg2kZO8CSHE4EhmhBBCiCZMVd4kYUKIDUIyI4QQQmwTU5WwUCRrYpvoqQOAKbHd314IIYQQm8UYZE1CJcTakMwIIYQQQnTJGIQqBEmX2AAkM0IIIYQQ28jYpEtyVRszY3bo0NDJGJRpyYw77B4bOhVCrJ+d7c6ohBBCbAFjkCsJ1eSYlswIsa1I4seDxFIIITaXMQiVqIVkRggh6iCxjJDUCSHE8JhhGjRTCCGEqMk2S51ETgghRoNkRgghhKjDNoscSOaEGBFRBwDbXZzf7m8vhBBCiHpMUeYkYEJsLNOSmU3rzUyZqxBCCNE/Uy07qJwgRCXTkplNY6qZq9ADRgghRP9MoZyg5+Hg2I46ABBC1GUKD5hNRw9QIYQYniGfh3oOCCQzQoipIqHsFhUKhBBTQ88BgWRGCCEEqFDQFZJCIcQ6McO2PN+RzAghhBBdse1SuOWFKiHE+pmWzLjDYjF0KkQRWz4CrRBCbD3bIHMSNrElmNn9gRcAc+Al7n5eZvlZwPOAy+NZL3T3l8TLfg14EDADLgJ+3t3dzH4EODfe51+4+9PbpnNaMiPGjUSzHpI/IYSYHlMVNknYZmLWS29mZjYHXgR8H3AZcLGZXeDuH8ys+mp3f1Jm2+8B7gZ8azzrb4F7mNn7iOTnO939M2b2MjM7w93/uk1aJTNCDIXkT0InhBDrYmgJk0xNjbsAl7r7RwHM7FXAmUBWZvJw4DjgMGDAIeA/ga8D/s3dPxOv9xbgoYBkRggxUSR03SExFEKMmXXL1LbIkxnsNCrOn2Rml6Smj7r70dT0LYFPpaYvA74rZz8PNbP/BnwEeLK7f8rd32VmbwWuIJKZF7r7h8zsxsAdzOy0eH8/QCQ8rZDMCCHEJiAxnC4SUSG6Z+hI1Pi5yt1Pb7mPPwde6e7XmdlPAS8D7m1mtwW+ETglXu8iM7u7u7/DzH4aeDWwB7wT+PqWaZiazLhuzk1nW96kCCFEgkS0XySLQjThcuDU1PQp7Df0B8DdP5uafAnwa/HnHwTe7e5fAjCzvwS+G3iHu/85kQRhZo8HWmeAE5MZsfFso6xK4IQQoj82TRYlZyKNGXaol3LExcDtzOw2RBLzCOBHVw9tN3f3K+LJhwAfij9/EvhJM/sVompm9wCeH2/zte5+ZVzl7GeAh7dNqGRGiKGZosBJwIQQYhjGImeSqo3G3XfN7EnAm4i6UX6pu3/AzJ4LXOLuFwA/Z2YPAXaBq4Gz4s1fC9wbeB9RZwB/FUdkAF5gZt8Wf36uu3+kbVolM0KI+kxRwEKQpAkhRBhDSJUEaq24+4XAhZl5z0p9Pgc4J2e7BfBTBft8ZMfJHE5mzOxU4OXATYms7ai7v6BsG3fwsbyRELUwZUBiCmyqpInmSHCFGA/rKANOrLxipjLWkJGZXeAp7v6PZnYC8A9mdlHOYDxiA9gECd32zEKIrUSCK6qQ8G4WG1Be2TYGk5m4wdAV8ecvmtmHiPq0lsyIUTIFIZNwCSHEmpma8Eq+xIYxijYz8eA5dwLeU7qiO35sYplGT/TUc4WYOGMSLomVEEKMkDHIl4SqQ2zrz+fgMmNmxwN/Cvx3d78mZ/njgccDnHqTE9ecuvGyyVInUdsMxiRWWSRaQggxIOsQqi0v4G8Tg8qMmR0iEplXuPvr8tZx96PAUYDvuM0tfY3JEwMxNlGTXG0eYxYtIcR00IuRETOGCNQ6MJtcpwVdM2RvZgb8PvAhd/+NoI3cYXe313T1zs7gwTBRkzHIlYRKCCHGxya8GJGQiakzZMn6bsCjgfeZ2Xvjeb8Y92m9uUxdxoqQpPXKuoVK8iSEENvB0EImmRJtGbI3s78FbKjji44Zk6RJrFqzLnmSNAkhxHbTt0xtvCyZOgCYVqnPHd+dfkg3i+1s+A9t3axLrCRNrVHESWwjY6i6KqaP8rMwho48if5RaWwETFHQJGD0K00SpV4YayFymwolY70GQkyNsf6Wtik/E+NAJSbRiCEEbKsEqmtRkhyNmrEWSoQQoi6qprxmVM1sYjLjjm9LV3sB2JbdvH0K1MaLUpdyJDESYvsYU7vIbUV57wpdSpPEaNrolzFhxiB2myJUXYjSxgtRQttCjR7IQuwjSRChjP1emXDeruj4tJncnTf29iVbU6CN6VOopiZKbe/Nrbl3JENizIy9wCjEWFnHb0f5fz6zLSk/FKC7omMkW93RpShNQYya3DtTup6dIRnabiQbk2Psz8VNZSOfD6oyLXKY1JV0d/aOhd/Is0OT+nprYR0PlTFmoG3EaMwiVOd6jvG6DEKbh6EeftVINkaBBEKM5R4Y7bNnQ/IqN8O3/Nm00d++jvhMjTGLWlcZ6FgywLoiNFb5Cb0uYznvo2Soh1/Rg2pDHsbbzFgKnEL0RV/3uJ5VImG8JWJRyjpEbWhhapoBDp3BhcrPVKVn6PO7lUhaRotkpJoxdFYj2jHG55XaqQ6HmX0N8GrgNODjwMPd/XM56z0GeGY8+b/c/WXx/L8Cbk7kIe8Anujui9D9ZpmWzLhvxUiuNh/HD6wrYVq3FNXN4IbK0Koe8GN8eEDx+dWDQUydbRMTSYYIZR33yrqfeZvzezeYrb04/wzgr939PDN7Rjz99JVURWLybOB0wIF/MLMLYjl5uLtfY2YGvBZ4GPCqkP3mMS2Z2RL6FrZ1y1JTKVqXBIVkaEMU1MseHmMUnbzzKMERY2BzCi2rSEYiNvX69s3Y8ucu7ucxPhs3lDOBe8afXwa8jYPScT/gIne/GsDMLgLuD7zS3a+J19kBDhPJTuh+DzApmYk6ANiczHs20CBNXclS31I0ps4exlb1Ki/TH2Mmnj5vY3twis1i6gXaTRGTqV+HbWLd12odz4Cmv6MxPj9Hzk3d/Yr4838AN81Z55bAp1LTl8XzADCzNwF3Af6SKDoTut8DTEpmNo2+xaxvWWoiRX0JUJX4DCU76yzAZzPxsWXOvruQ0IhOmFKBeSqSMqVz2iXqKGh9jLlzoKn8TnMxw+eNrvVJZnZJavqoux/d3629BbhZznbnpifc3c3Mc9Yrxd3vZ2bHAa8A7g1c1HS/47rTq3Bn7/rpZzyzw+s57W1lqQ8ZChWgrqWn7IHVZ4Y/ZNWrJHMem9QIUZexF7THWBAa+zlLs8lCMSRDn9e+nq0ak60zrnL304sWuvt9ipaZ2X+a2c3d/QozuzlwZc5ql7NfZQzgFKJqY+ljXGtmbyCqXnYRELLfA0xLZjaEvoSsa0lqIkNdCVCV9HQpO0UZ/joy4nWG3SU1YoqMqVA+BmkZ0/kYurDcJ5vQ2dDQnQmNqRMhjcnWORcAjwHOi/+/IWedNwH/28xuHE/fFzjHzI4HToiFZQd4EFGPZqH7PcC0ZMZhbwMymFlfVa1aSFJXIhQiQF0IT9mDpqsMPC8j7lpwtqVNySZ/N9EfQxfch5KXob73GOVkE6RiKIY6d+usWVFEm2f10PlOfQxfv7ieB7zGzB4HfAJ4OICZnQ48wd3PdverzeyXgIvjbZ4bz7spcIGZHQFmwFuBF5ftt4pJyYx3WM1sXVW98uhayLqQozrnte25qxKetrKTl4H3ITh9iM2mFfo35vtMaXRljUnTmnVJzLoKTUNJyhRFZJM6GcpjHR0PjaGTodB7fmztiqaCu38WOCNn/iXA2anplwIvzazzn8Cd6+y3iq29ikO3velSpprIURsBCjl3bb5f0cOkTSbch+D0ITabIDSjTv+UpKQpdb7jyMXHduZrfUvat8T0+V3WISxDysmmS8a6WNd57LsGRh5NnuljjEbWxgyfb3c18mk92d3ZW+ODbdZjoawLmWolDIGZRFPpKft+TdOdlwl3JThdiU1XdXu7FoK+28sMKjDbICh9EXLuBhaedQlNHyLTR7r7KnytQ1TGJiRDv9QcG52+ZB2gA6KhOhgSw6NSQAnrEKc2wlQ3I26SUVVJTxPZKUp3o/RlMsymcpNkgl1IzdjC1n2IzNrkRaIyPGXXYE2ik9xvU6nL3mU6u5SXPoRlnYIi+eiXMXVOVPe+qvPsX2cHQ2I9TKqk4D6ekGBXBdamwtREgvqoHlYmO3VFJy99tdOTygDbvNkZMjPrShS6lJje5GWEsmIDDWbbBh/qjfeaRSd9H45RbLpIUxfPuC6lpQ9ZGUJKNqHzoCHpu2p67jFrPP+77Hxocm29VM1sWjIzJrqUqiZiVEeC6ohPVaZTK3PJyRDaCM46xWYoqWkrDV0ITOfisiZhmaKEdEXb796LDBVd944kZ2zRmrbpaPNMaVv46kpY+pKUsYnIlCJEnfVU2tE1qFMGCD3Pod+x786HxHBMS2Z8j8X146pzmzA/3PxH0He3g1Xi04XsBGcmLQSnrdiMPaNqKhFtBaYTeelYWMYmJkNXO+j7TWGd891afPLulRaC01W0xnYODdIVc5P8v8390EZcuirE9y0nU5KNvlnHuWj7krN03wF5b1e1TsbWpkuEMymZcYe93b1O9jXbmXWyn4S2klVXhkIegKHCUyY7oaLTpopYOnPrU2ySjKpu3dp1FGSbCEUTiWklLh0IS5+SMrRw9Ekf361pgTjkGtYWno6iOG3FZp1CU1dimlyvpoWzNgXgLkVlHQXxdXYqNBXqVmPvsxpZV50V9dEp0VhwwGeb+/wLYdpXsAVdSVERdWWpjgyFik/Zw7KN6DQRnL7FZiyZUd0qg3Xloq7ANJKXFtLShaysTUp67uFt7dQshDc5z8G9BVXcB8Gyk70Xa8hNU7FJfmN9Sk0dkakrMXUFpklBtI2wdCkoQ4vIWNrw5tGmXW8X5zWknNDl+HZt2u922SmRGIZpXSl3dq9bT+axc6TlwJANZClUgKrEJ0R2ijLhkAywieBkM4s6b2TqhJlDQ8mh0Zk6Bb46D486klFHYGrLS01xaSMrnUjKugVkDNGe0IJjV+empBAfeg0rewsquI8qJadhFbUmYtNXlKYPkakjMHVloom4tIrsdCwoYxaOPun7e1c97+pex7pliAPblzz7i+7hNtEcMS6mJTNrpCtpqiNFIQIUIjxlslMlOnkZYF3BqfNGpg+pGeJtSqjIhMpGqMAEy0vP0tJIVLoofA8pG2uQB6Df75j3kK/7vXLSX3U/FBXSG0lOzehNHbGpE6XpcjycLiWmToGsjrg0iui0OD99Fc7H2g53SNq8EC2j7DkZcm+UlS2aVCNrKjnjw9ibbVhNg5pMSmaiNjPDhZWbdIdcR4pCxKdMeJqKTl3BqfNGJvRtS6jUrDOTCS2gh4hMiHSECEyQvASKSx1hqSUrTQr5ba/ruiM2XR5v51DtKmKtSY7X9LynCwEh5yLz/crup7yCfN69Wig4NeQmtEe0dbalCRGZriQmVF5qCVGDZ3QbUelbRvqukr5Ouqr9UUZZeSL0Ohc9U5u2763bpndsveeJaiYlM7izCMxU5z28ne97TJgq8amSnaJMtyoDqys46QypK7EJlZp1C00ZXUhMlcB0JS8h4hIkLHUK8XWvU1NB6Etk1tmg8nDJsfZ6eLC2EajdY+HXNikUVF2jVFqK7sNsIT9YcALkJiRaExKl6TI6k0eVxHQlMEH7qfE9m8hKW0kZQkLWVQ0+oW51+HV0oNRF+9+q+yXv2Vt0PxaVO7oY124suBl76gBgMwmVnibUFaW24dOEsoyyLFPLy8DqCE7XYlMlNWPIUMoK9lXfs1eBqRCXTqQlRA66Fp8mQtIy8/YRDtiZT8U1b9qtcSJQfcgShEvPYlF+/WN5CJGc7P1fKTclYtNUatoITVlUpkxkquSjSmC6kpc60lJXVrooiK9bNvqm7+9TVK6oey2KyhtN2//WafNbp41vk7a9YhxM6kr5nrN7XfsH786RdoWguqIUIj9tx4LJy9TqCE7om5YQsQmRmjZC0zY6U9X4v6nIlElImcD0JS+l0lIlD10IT8g6NYSkkXzM+svifN7Nvm3RrkDiZfnLXsi+V7evlKNQCQqJ/lQJT5nolEjOctDbKrkpEZuqaE1Z1bOuIzRNRaZMYioFqCL9IeJSR1jqFpC7KsgP3SPa0JQ9i5uc47xyR9P2wHXb/4a2+Q0VHHUAMB0mJTNdsW4hCpGfKuGpG0KFgxlRqNy0FZsQqWkrNH2wTolpIjCNxKVMKNrITtnyAEEJEpNAEWkqFT6S6opdpMOKCq0l56ZIog7IUaEQ7a9XKEBV4lMmPGWiUyQ5BYITJDcBYpMVlLIoTZHQzA7tdNKYvYnElG7TUl5CxCVUWJoUoruUkj5rdgxBWfmi6+rzIdeujvDUiep0JThN2kcPi7HX44u7KTCpb99HBwBNb9pQIQqVnqLMs67kdCE3dcWmqdRUCU0fFEVlmohMkZDUFpia8lJbXJqITtGyElEpFZSKjDZESEIL/T6vX2WtyTZjwBbRb7BO+ve3KZHwdGE459pkRWhFgHLlJ1qeKz2H5/myU7ddT4nghMhNiNiUSU0doalDXlSmSErqSkzZ87RMXqrEJURaQoWlyTO/Lxnp4kVoX5SVNdqej7xySOh1yXvG16kyX0dymghOnfY3YrxMSmb6oP9G/eX7r5KdvEyozluWULlpIzYhUtNEaLqOztQVmS4kpo7A1JKXOuJStG5NYSmUlQJRKROUsoJ0VcE8pOBedzTkyXZruXMcs73wAr/tLSrPny2OFa5TJkJLAcpc97T4FEtPgewUiU4oeYKTkZtgsamQmqIoTZ7QdBWdSZMnMnUlpihNZfJSJS5V0lLnGdykUL5OAemjENxVWSOEovJIm46X6vZAFlplPrQtcPbezZZT6vbWKsbJtK5ax4NmthkYs7tG/fn7qfOWJVRuQsQmpCpamdR0LTR1KZKfLkQmT0xqRWECBSZYXkLXy5uXU9ivIytFopJXyC0qGJcVqstkpEo8Qnp1aROSX5f4hEvKkcx21XnkrEwQSgTJSiQn73pG8+eZeQelJxGepexUiU6e5IREcrJyUxC1yRWbGlITIjQhhEZlsiJTR2LqCkyZvJQ9n0OemaGF5qYF9ym+ce8rzfkiEX6svDJK2fWrIzrZtDUVnCq5KYvaTEpqTOPMTOhqdU9XjduKaNLOJSGbqXQhNyFiExqt6Vpo+qSOyLSRmKYC01heGopLrrRkCvd5shIqKrnzCgQjLwMuk5EiCelCcAAWHdQ73rNwMZ95WeHhSMmyVeYtBeagvByJ5xf0GnRAJA4KUF4UKCs9eZEeWywCRSdHctpevpyoTfJ7rZKaKqHJ0iQ601RkQiWmrsAUPUOrCuBV0lKnUN2msL9pvZvlUfwcDz9vTcQnW2apU5U+pIF+SLX5qpolZWLT1yCtm4KZfQ3wauA04OPAw939cznr3Qp4CXAq4MAD3f3jqeW/BfyEux8fT98aeClwMnA18Ch3v6wqPZOSGXdncX2z7hnnh8MGi6qi7Vgw0LQxfzO5qRKbqmhN10LTJ3WqpDUVmSCJ6UpgquSlibhUSEuIsByYzhGFrFzkyUSemNSRmyLxqJKJhVW0Q6shIwf23SJLndP84TmblRcs5r4LOV9rX6ZWxSlPjvJkaFVgjhyQn+w2s8y1zEZ68mQnuSezEZ0VySkSnDrV1EqiNrnRmozUZKM02WpnfYxBUyUyIRJTR2DqyksX0hJa6O5CTJqWL8ZAURmn7nnJl4L6va2G1joJrUpfVYW+Sm5CxWaIsstEeQbw1+5+npk9I55+es56Lwd+2d0vMrPjgeWFMLPTgRtn1v914OXu/jIzuzfwK8CjqxIzKZlpQ5NMqokAFWUcTSQnVG5CxKap1JRVPyurdlYkNEXRmb46A8iLymRFoi+JqRSYtvLSQlyy0lJXWKpkJSsqVevniUmeVOSJSO56JVnbIq9Un+zLZ9G7owrK9tEXcwIb3FpOXmfF2+dJVJ4czT0jKr4gLUFpATogMUvpKReetOxkIztJFCf5vCI5FYJTW27SYlMVrcmRmqIoTXZZnejMAVGpKTIhEhMqME3kpUxcQoSlTqG8SynZvXb4N/Q7x1WXH7oq4zR5YVvnBW3efVAlOFVyU9X2po7YTFFqnPDaBx1yJnDP+PPLgLeRkRkzuyOw4+4XAbj7l1LL5sDzgB8FfjC12R2B/xF/fivwZyGJ2RqZaUJV5lBHduqOAwN12rtUi00XUlM3StNXhCZvjJm8qEwTkQmpUtapxNQRmDJ5aSguZdJSJiBlslIlKmn5yMpJVkzypCQrEnuec+/lyMYiZ71ofsnLgL3wB8TCLXjdOsyt2q7mJZGZueWISY7wZGUnK0XZ/aQlaOYLjs1SYuOryxLpKRKeItnJE5205KQjOXmC05ncJBREa7JSkxel6TJCU0dkmkhMqMAUyUtTcQkRlrqF9iFkZHFsP43zQ+1rhbT9DkUy1KSMU+eFbeh4LlU1T6rkJlRs6ryUrTuo6xZyU3e/Iv78H8BNc9a5PfB5M3sdcBvgLcAz3H0BPAm4wN2vMFt5dv4z8EPAC4gk5wQzu4m7f7YsMZOSGd/rPmMKeeNRRFFGECo5Ie1TEuqKTRupaSI0dVhH25kuRKaNxDQWmJLoSxN5KRKXskhLkbQUrRMqKyvzM1lPWj6ycpIVk7SUZEUkTz7yJCNPYIpkZHev+ve8CFinLfNZecFjp2B5ngwdEJPMOlk5Sq+fFqG0AKXlZ2X9tPTE+01Xe0tkp0p0ZqlrmywvE5xQubHksFViUxCtSaRmpU1NTpSmSGjS0Rmbz5e9q4XSRmSqJCZUYIrkpam4hApLk/JAWjTWwbqOVyZNVeepruyESk6I4FTJTVVHA3XEpmlNk/FjTdt7nmRml6Smj7r70eVezd4C3Cxnu3PTE+7uZrlv3XaAuwN3Aj5J1MbmLDP7S+Bh7Ed20vwC8EIzOwt4O3A5VFdLmJTM9EHTH3kZ2QwgRG6aiE1fUlO2/yKhWXd0Jk1IW5lORaYLiQmMwBQJTBt5qRKXEGmpKyyJkKRFJVRS0oKSFo4DIpORkbSE5MnGYi+7fo705MwrW7+IvRrlmVmNZ+nOLD9qM8+Zn103u05altJilBadValJzU8kJUd68oQnWa9MdNLteRLRWUpMRnByIzgFcpNue2OL3XyxKctScqQmN0pTITR9ECoyXUhMHYFpIy6hwtJWGna/1G17pq7YOT4sOhzy/YuEp+wc55WB8q5ZiOBUyU0bsalTlqmSmmkKTSOucvfTixa6+32KlpnZf5rZzePIys2BK3NWuwx4r7t/NN7mz4C7EkVybgtcGkdlbmBml7r7bd3900SRGeI2Ng91989XfZGtl5kq8n7kdQWnrtyEik2o1FS1qakbpakrNF2SV8XswDoV1bzqiExZNKa1xHQsMHXkJS/iUiUuedKSJyywLy3peXmyUiUq6XlFcpIWjrRgVMlKWjB2FwfFJG/eoqC80KTW0Mq4lA2qOxf97Oez7HpeOp2Wp7TwpGUnmT9fWb4XL9uDRXQPJYKzlJVkOiM7edGdItFJqrCtSs6q4OzLzS57s3kkN7NDzPaO4bP5MnKTFpvVk5YVm1RVtKIsOJGaoihNgdAk1Klulo6+pKuYrcwvEJmyaEyVxIREYYqiL0UCUyYvVeJSR1jWLSfHvhgd79AJ3bZdqPs9yuSn7PyFik5oFKdKbtqITROpqVP9bMuEpikXAI8Bzov/vyFnnYuBG5nZye7+GeDewCXu/kZSER8z+5K73zb+fBJwtbvvAecQ9WxWybRkxr3R25cu6qymCf1xF5H86OtEbNpITUiUpmysmjGTjcq0EZlOojEdS0wTgWkjL3kRlzJxyYu0JNKSF11J5GRlWTwvT1byRCVvXpGYpD8vVtZhhbRY7C6SAnv0P2/Q+jyp2d0N6DkAWCwC2sDMw6I+OzsH1zsgM5mf9k5qhZVbdH5w+0R80gKUyE9WcFanDx2QnRDRmdsexzjEnAXHOBQJTtx5wTE7wpzdZcQmuS+jiE6R3KxGbtJRmyKxSc6ox+ctKFoDB6I0RUKTF51pO4hm1yLTVGLyBKapvIQ865sKSyIefdDXvkMlqeicVEV4sue7S7lpIzZdSU3dGidVg8GOCTdr1QtnQ84DXmNmjwM+ATwclj2UPcHdz3b3hZn9AvDXFoVg/gH4vYr93hP4lbja2tuBJ4YkZpol2JpUZYptZSf9464jNnWlJqT6WdMoTZHQ1I3OdEmdrpaD9hfQ2D93WUE0BgJEpo7EVERh8qqQ5UVgQgSmLPISIi9F4pIXaclGWXaXolItLGWykvzPk5Q8OUlLyco2KQlJi0ZWTrISspvX81PFAzBEZPIok5udnDeI6XnZbRMBSs9PS1EiM8ltuTM3wPZv6Yz0ZIUnT3byRCepyjbfiw4UyUy54CxsHiQ385TEzPaOsTfbWYnazKAyYpMrNXv70Zjl//n8QJSmSGiytOkMoGhgzML1C0SmKhrTpcS0kZc64tKnrAAsvlK9//lXdVu4LPtOIaKTd/7qRHFC5KZKbKq6j+5aasZWptkk4gb5Z+TMvwQ4OzV9EfCtFfs6PvX5tcBr66ZHV47wH20ITcQmVGpCozR9dHEcSl54dqhxZ3LHckmWhVYtC6lW1jAaExqJKZOYsihMSASmSGDqyktdcamSlrzoymK5LP4ui2R6VVaW6+VISv68VCEv9XlVbNLzM+0Ncgphu8cK2hMU1VOrYJYNt6TYOTTPTK+uO09tmyc5ybz8ZZY/PcvKTrROnuikJWdfauJjznwZ2ZnP5iuCE0VwdpibM58tmFvUbfbc9tgjvp9svoz07NmcmS9Y2M7+59kO871IYPbiamfAgapoZRjA3i6+s5MfpakhNGXRmeXxGnQCkFAUlelLZNpKTJnAhMpLG3EJkZIh9l1XhPLOQV3BCY3chHQ00EZqygb5LKt90oXQiOkyqMyY2f2Jul+bAy9x9/OGTE9CV90q7l67WztS08Xgnk1+9NHxx/3Dz7aXqapitrqs+DqUtpGpoo7IBEZjyqqThURi+paYNgJTJS9l4lIlLVlhycpKnqgk89KCkshJWkj2VpZnCoCpZYucqkJFYtOWrMQAzFP3+YrEpNZNxCg9LxGgZJtEbNLCc3CexZ+9UHLmcVRnZx4vS0VxZjO4jn2h2ZkZ1zNnPvO4ilokN3Nz5m4cEBtgzkGxmbO7lJm577I3n8fRmuja7OUITFGua4sFPt9ZidLY7m70W06iNFAqNF2S7ZIZynsMi5YXVy1LUyYyIdGYdUlME3npU1r6IC+9bQSnjth0JTVlZZ+ysk5ToSk+Vng1+ilHZ6oGg950Bvv28YA5LwK+j6jHg4vN7AJ3/+BQacpjcWxvdEIzlh/cWNJRl2BhCW3sD2sVmTrRmC4kpk4UpiwCUyYwIVGXEHnJikuRtJQJSzIvT1LS6y8yBctFVnLyeoRq0S4izTxHzuep+3qeEpX5PC0t8wPbzzNyM5vPlp/zJKdIcNJRnJ0d4zp8KTeLTPQmufY7c2dvDrOZHRCb+cxZ7M3iKmnG3JyFw9yNuc2XVdEWzKIOBRyw6N6ds9ivJ5ZQkY0nnQVAdvyaWGiSbp13dlaFJjpRq0KToio6k0y3bTeTJmSMjLI2MmXjxETLuxeZriVmagJTRfr7NBWbrqVmTEJT1dlR6H7ENKksiZrZHbOCYWb3dPe3tTz2XYBLU122vYpoRNFRyQy0F5qxUfdHP0XKqpitrJftIrnOfqu2zVnu2dbYrFYt259XHJFJ6ENk6lQnK4vE5FUjC5WYsghMWfQlRGASGcmKS3q9rLikpSURlrSoZAXloOTkFyx3a7Z5ANhJ3jBeuz9vnolIzudz+EqyLCUtO3OuvzYWnWuvX0pOIi6LRdx2ZT7j+muPsXMoWj8tN4tDs6XYpOVxZ2fGYrEvNotFFJXZJb4fZh6V5ReR2MznSdsbY3dhy+pou+zfN/ttbaL97hJ3JsAcWMDePBKXZEycKEi0/zn+n1Q9g2hX87jtDLBS7SyZLhUaSFU5S3o7i0Vm5ULlzJsoIVXLQlnHmCubJjF5LL6yaNQm59gXF532ttZWaMbIVF/SbjshV+w1ZvaHwK8Bx8X/Twe+u+Wxbwl8KjV9GfBd2ZXM7PHA4wFuWlKNaMxM8Qe9jYQ2+i+lphyl8QABK2vkH0JVRCaPsp7IyhrzZ6MzcLA6WfQ5Xj/daD9QZMoiMUVRmKIITBuJWV13tRBbJC15VYaq2L2+9iYN2DkwVVx8Te6btNSkum+eW3S9dgx2939WK91Sx7vYXRg7REKTtK3Zv3dmK+PgRPfgYmWsm4XPlp0DLOexsxSa5bzZzsoAnUnnANHn6jY0heQITB9VzTaRPqqWbTpNhSaU3S8tgse4EcPiDNKb2agIKQl9F/CrwDuBE4BXAHfrM1Fp4tFIjwJ8w1fdoFlXQAMzNpHZ9KhMU6KqH/G52d0vefmxY/sD4i0W5VGfpJejAmx3Nzc6A8nb3/x9294Cn83jnpkOxQ2a53EPTeH318wX7NmcObss2Il6iGLOzPbY89lyem57y+jMfLZgsTePq/kYc1uw8Gia2R67cVWgqEqQs9jbL4wmn3f3LGoAji/fxO8u7EA3wotF0qA8/qnH1cvyuiCOv1Fmem85f2dnlt/jWPw/e9bm88MsFnvMD+2wOLbLzqH5Umzm8zmLxYL5oflSbOY786XcJNsk60bfJVm2fz+sik7zfCEbjUkfN0nP8nNO9bNsVCZZP7TK2Wr1stUqaNnqZune0Xbmq72iJe1osp0EZHtAS3o/S3fvvN9+Jun5bI85i+V4NcBKb2fLeXurajZLTc/2jmGpAThXejdb7K9neb2T5URiJDJh7Bw/LxWaQyfMJTQZ+hQZqK5mBt0PeyFEU0KepseIKi18FVFk5mPxYDZtuRw4NTV9SjxvdKzzB9tFBwCiXDrS0hLVWW8YTUmLy2Kx2m4mu3xvsdJuJpq3C7N4sL64qlkiNKujlq+mr0hosvPne7ssZjv7AuO7LGynltDsHzRJc/IdkoLFHGbZ7CDV8DwWmeUpwTg8c/b2WArNahuKKFITVVVKpp3dXZgftqXY7O56vE5cuM30zJVITDR/zmLhHDkSzT9y3M5K1ObwcYcORG2I5x0+LqftzHHJ5c20n/mqw6n1ctrL7C7guCMH5jdhnlPXu6qdTLTOqrSkl2c7BthJVSmr6hAgKzCw3xnAfm9n+wID9SQGiOQl031z0nUzsBSZJDKTiMzc9wffnO/t7o85s3dsKTLpsWkgKzGpa5kWoUR60hKzyJk3AeaHd3LHloGoS9x0VbOdIzuNq5rND80Kq5p1JTRJIX9Tq5u1kZjQKmZdRWTG9iI3hGlWMbOVoRS2kZBvfzHRyJ53Bk4CXmxmD3X3h7U89sXA7czsNkQS8wjgR1vus1O6kJg+fsxj+bGNJR0h7B3bLe3RLKF2dCYvEpOelyM0K9GZGkKTvDEuitCUUthdU8F0tu1Bdj8pqZlbVO1nPl+wcGNnGa1hJVoDME96NJtFVYiyYgMwn8XV2eJDzBfGkcOrnQIcOWxxVbRIcCJR2a9+duTIjN1d58iR+UrVsyNH5rHozJfrJssSstXTos9JwejIge6Vy3o3W87rqGE35Df+h1VBgfwezNLzs437IbwXs2Q6LS7RsoPykizP9mQGxQITLdtbCgywjMREnxfLSAwQJDGQHVBzVWyS35ctji1lJhEZW+wuRWYlKpNIS0lUxpNoXLxdMq5M0jVzMt1V43+A+eFDy04AZjuzZY9maRHJSkqanSPzlU4AqoQmeQmX7gggefZlOwJInqt5UpMUooukJimM15GahCnLzToEJqGriExV2afsxW1Z2aKo0X5RjZOinszU+H+zCCmNPi4eBAfgCuBMM3t02wO7+66ZPQl4E1HJ4qXu/oG2+21LV1GYuhITGpEJEYiyH2lZFbMufvTZMWa6ZG+xWOmeee/63ZXumfeOHSvtnjlNk+hMpdAkpe35PEhoIO4QICU0QNxz0r7QrKQhJTUHKljVkJoD0Roy0RqLozXE0RpbrX4WicucpdLsxSIRt6uZJ21z4oJp0rZmnmpbc4TV3s4O70RRnMOHfKWzgCOH4sjNoajK2RGM3QUciQMh+5IT/9/NGUxz10kiNKvzoGg8maIxZ7LLouV5Y82EB7DTQpTX5XIe2fFkIEdoCgbPTM8vk5WV6ZSwwKq0RNOrUZfkf9Ugmll5AXIjMNH8YoEBGkkMRNU49wVmX2SWVcvSIpONyCT/O4rKlFVNm83nB9palUlJFelts9GZukIT7WN2oGezPqUmoYncpBmD6HRZbawPgYH+JQa6FZm6TOkFrVil8sqlRCY97w+7OLi7Xwhc2MW+mtBH9bEmkZgxi0wXNB0wMysrdclWNSuLzqSFpig6A4ERmqTKWVKoSTcM3jm0XxhKR2kSCUlJzfKYNaUmj2wkJ71hiNhEqyfVeZJxPeKG87HcRJ+T3s/K5QYiwTnMaucBacFJ5pGat5cWlAOdCRzsHQ2SMmay7mpPadExVs9V3sCaecvylu+v12+vTTsFLw3SwhKtZ7nLVubP0uvH/+fp7ZJ5q+unhQXIlZZken9gzH1xAQ5EXqJ1VuUFWGkLk43AAIUCE/1frU6WzEtHYlb/F0gMBItMaFSma9LdO4dEZ6Jl9YQG9ns4S55JVVEaqJYaOCg22UJ2qNwkhLaz6bv9SZ807Z2sTlWysUpMtF03L2inLDLOfmc+28p0r14F62rn0rQaWZ22MW0lJtpHsxu9+E3IOG+dbHQmtO1MNF0iNFCvylk2SgP5UpMXpYFcqaliHncSkGZFXpbzDorNnIPtawBmqdHVk7ffSTe3idwcstT4NIngWKo9ic+B3WWvaNE84/AsXrYUodWuniG/l7Qy2YF84dn/nN42vV76DFl2qJClBK3MK7k8OYGazsh2mpCQ16/ETkZuVnoWT31eEZq5k9QzLBKVaBtfmVcmLNH/g9KyOn9VVtLyko68wKq8AIURmGRe9H8/CgMl1cmgscRAtcikyZsXyuzwDnvXH6wy1oXQAEupSZ4dIVITrZccO/U7zql+tlw/JTfZ53aV3Cz3UVNy0oy5Y4GuulGu2wYmtPwUUv5pIzHQTGTq1jIpSkOfNU1Et4yzRFqE2eC9Z7RtA9O1xEB7kemqTukQP/y60ZtsdCZYaOBAGxqIB8RLCkJZcYHVjgFypWZ/95YqHHt6fs73KOv5DFbFJi0vwIFOA6J5q8uBpdwAK90+zpaFz5ScxIJziGMsmK8IDhbNYxYda3UMm1iQ0qIzSwmHrwpQQp7wRJ9Xz1ae+OQtg1UJWq6zOHj28+btH6NwUScUiQzsi8fqvNUEzTLb58lJ2fx098hZWYk+lwtL9LlYWqL/4eIS/S+XF6A0ChP9D5SY9OeCaAwcFJk0eVGZLtvLZCkTGmClDQ3QSmrS+03vO/vcayo3y+0DJWe5z4qOBaZOmwb7dcpUoeWfPgUm2r6exJTtUyKzGUxLZtZIVw33m/RO1pXERPtqVrWs/G3IcLdNtt1M7joV0Zm6QgMER2kgR2pWBtUIr362TE8NsVkd6G//c4jcRJ/3BWc22z+H6ejObLb/fRLJOcR1KaGJ58eJXbDDIWKRiUUHVse2WczS0pKa76vXeik981W5ya6bXZaWn/195c3LkZeceVXb9E1aMLLs5CzLrj/P9EC3k5nOE5QD82eLA+vMV7pFThV6M8ISfc6XlpV5GUGJPreXl+hzgcBAcSQGwqIxsCIyedXL6kRlVqIwqXYzIdEZOCg0QGmUJlpeLjWwLzbp50WV2ET7DJeb5fYBkgPlbXDqUjX+TR/0MbZLk5fAdcpAIeWcvgQmOn63ZZnpiYyVjhW3DWydzPTdVWCf8gLh0ZKm0ZiqY9T98TdtL1OHkOhME6EByqUmJ0qTHAsyUgPkdhKQR+arhERs8npDiz6vyg1E7W3mmZ7R4GB0Jk9wIExyoEJ0Ul+gSHbg4GCei0yVuUV2uR+8dxd7c7LVibOyU7Rt2fp55ElTW7KikUdaMlbnHyyM5a07z1y7vEjKcjpHUrLz52TEJD5knrBAmLQULc8bFyava+WVapshApP9XCIx0FxkuozKhApNtO7BKA1UR2ogXGzS2+c9O4oiN/vHqZYcCBed3GOUdNQx5kEjh+ptdV1tfKN9jKMcI8bPpGTGZv3LSAhtx4KpG9noSmCg/MdfdayuMoCQLpLbktezWYjQAIVRmmSdsigNFEgNhEVr8iioigarA3CWyQ3QSHAgkpz9z6s9pqUH7NzJWTfhENexyAzumZYdiKqqHcouzzhEti/9Rc6oxwfeUM2SbfPv7awQrS6rV5hJV5drS1YwStfNkZb9ZQcLa2nxSJgFrFckKVAsKsDKQJV5QhJ9rl6neEDLksgLrIwPkyswUB6FycyvIzHR57AG/6GDbBZFZ+Cg0AArbWiAwigNFFc/i9YLE5to+3y5Se+n6JlSJTnRccNEZ2W/1x4Ux6GrrvdFm7JS3TLOumqTQHOBifYtidlUhjeDEdLlwJVNq2TVaa8ylMRE2xWfq76iMnlVzfKiM6FCA7SP0iTs7KwUbtJtaqAiWgMHxSZdFQ1KozYH5Cbu8nk5XSE40fShlUJiWnIAZpnISFpeoundzHSx7Oyvsyo1QKX4LNezg/fmXo7gLNfPy/KWEaKw31xpOL/HZ2KebOSRJyr7y/KjAFkxgVU5Wa6T3V+mK/AiCYmmw9ZNCwvkS0v0uUBcoJ68ZD9DfYmBYJHpIypTJjSQH6WBg1IDBEVrovUOik20z3y5ifZTLDjpfZY9c0JEZzU99aWnLnmSVId1vqRtWr7p4yVs32WX6BjjKr/0gXoz2zCZ6VJC6tKmHUndhvahPZOFdLe87oygKCrT1wBWIUITrRcWpYnmFUgNFEZroIHYBLE6aKddv4AV4agWHKBScqJ55aID1bID1x3oXS1ab/W+2FnOL5YfyJGbWfH9umfzA+tnyZOjKsrkqSl54pCVjSxZ+QjaZ7JtzvhEs72D62cFJZpXvW12uzJhyU7bIrtuoLhAsbxkpw90X5cRkAYSE00Xi0xdDghLZsyZPKEBCqM0cFBqou2KozWQLzbp4+Q9d0IEJ2+fefsPedaWtc+pS7aL6TymICNljOWlKwxbbomOPx2JEftMSmbMbBBh6bLBe5NCe51ulUPHi2mTGUTbj++NRlHbmaZCA/WkZmW9TLQGaooNNJQbDkZvcgQHOCg5sBSdrORE88pFJzsPwOY5BeIcmYF8cSmSn/1t8iWoeH95+yiWozLKxKkteXIRQp6AHFynuMpT0aCrRfvN21eVqOTNywpLNK9YWqBCXKBcXqCWwEC5xEB9kekiKlMlNJAfpckeP51nl0Vr4OCzoUpuov3XE5z9fYeJTvaYbZ7ZeQOBTomuyitNXzB2XV4Ji/A0L69E6Zi2wDhWWl16G5iUzDRl3b1vtYky1B0Pps5gl0NnCutoK1OHIqGBg2kNkZqi9eqKDdSQm6r2NktWIziQJzmQG8lJyIgOHIzo7M8/KDzR/EO5hVvIFx+I2u5nx89JUywt16XWqf+bzIpSlqHu5iLhKF6/XHDK5AYOyslyftF1zJWXAFmBA8ICOdIC1eKSNy/vxUBdgYFKiYnmdReRSZMrKzlCA5RGaWA1j6sjNtF+6svN/rbFEZz941aLzv7x6glPUXoSxjqGWlu6qv0wdBklSkOIBLUTmLGVV0Q507paZoNmNF1WhWo6iOUmZgxdntc60RkoHlSzTGosk94yqYnm1xObJF1pcuWmEwokB3JFBwoiOpBbfS2hSHqiZfnis7+8WICgWILSJN+kTIpCCI32tKVKMKooEpAD65Wc15B18q5nNL9AuApELEhYEkLEBYLkBfIb3VdFYSBMYorWC43KzA4dYi+TlhChSdaDaqmJjhMuNtF+wuUGyO36OUtVJCfNIvWd6j5L0/JTlp5toWlZJKFOmSShXvW00Nom1ZEJCUx3mNnDgOcA3wjcxd0vyVnnVODlwE2JKkofdfcXpJb/LPBEYAG80d2fZmaHgd8FTgf2gJ9397dVpWcrrtwQmdW2ZBBtM4eyNNcZDDOEMqEBCqUmJEoTzU9JSQdiAxVyA91VTcvloOhAtezAqvBAsfRAfsE3r3rbwXXKJejg+uVSFMKMa5f76oO26Wu6r6JzvLpOSQG8JFqUKyrL7WoIS9n8QHGBQHlZ7iNMYqL5YSLTBaFCk6wL+VIDzcQGyuUm2m+54CRURXJW91kd1cmjjfw0IStMbVhHerM0KX+kaVadvs6L2vbiAhskL16/580OeD/wQ0TiUcQu8BR3/0czOwH4BzO7yN0/aGb3As4Evs3drzOzr423+UkAd/+WeN5fmtmd3b208dqkrqTZsG9Rus5U2mQY9TsNGF9GMcS1LBIaaBalSQiJ1pRuky001ZQbqIjedCo5CfmyA2XCA2R61MqKD2TkJyFAgpb7nOfss8H395z7IdpXH1GyfJqk++A+arbPCKjKViopUCwqCWWRxjrSUrFNUXfHdQQG6klM2fp1yYvOQLHQAMFSA+FiA+VyA9WCEx0jM2BrgOjkpbWM7PdoWzhPs7i++rcxhIBAt98zj3ZV6Ju8oA1vB9KlvGx7tC4Ed/8QRG3ZS9a5Argi/vxFM/sQcEvgg8BPA+e5+3Xx8ivjze4I/E0yz8w+TxSl+fuy9ExKZqoYKgPJo6tMpXkjvG7lBcIbyYVkGFXfq+uoTChFQgP5UZrldjWjNelt8rbLi9pAfgGsUnBgzZKTJjW2TgFL8YEC+UmolqDlPkPalQQ04K8tAUPRsMMACJCSleME3Csh1SGr1mkgLVA+TkuhvEChwEC5lDQRmU4HycwRGgiTGigXG6gnN1D8jAiRnP1jhkV00jSVnuI0lPT8N9CzqQv6KqS36821XsP1Oo31JS/jwcxOA+4EvCeedXvg7mb2y8C1wC+4+8XAPwMPMbNXAqcC3xn/3xyZsZlNMuRaRtsf0Rjediz324HEQLXIZMeYqUtZdAaqhQaKv2uR1ETLwsTmwPZ5Ba0WggMBkgNrEJ08quUnYUWCoEKE8ggrVJZJU5/Ukow6BLanWaFOe62QdUPuq4D9NJYXKBUYaCYxfVEUnYHiiAus5pV1xQbqy81yvUDJgVXRiY5Z/XyqE92pIitCURq2r3DbdXvkNgNR1u1drE51sdBrO9QL1SY41nTA5pPMLN3W5ai7H00mzOwtwM1ytjvX3d8QehAzOx74U+C/u/s18ewd4GuAuwJ3Bl5jZl8HvJSoHc4lwCeAd0LJ4GmpnU0Hs8m8Fem2s4BxvvWAbt98jCnzKBMaKI/SQLnURMuLxSa9feE+CqqlLbcvERwoLwQGiQ4MJDtlZMbs6YgD0jRF2hS+m2wbel8E7rvsfl2uUyUv0EpgouUBgtWiepnN54XfNXkB00RqoJ7YFO0nLx/Pi2KESg5UP3OyshOlo35BOStACZvae1nXtJGTNG26Qa7bxqVOOWxM5Y81cpW7n1600N3v0/YAZnaISGRe4e6vSy26DHiduzvw92a2B5zk7p8Bnpza/p3AR6qOs5VXr4p1vpXprk/4ZhnNGN6AQHhG0jYqU4cQoYHyc1IlNdE6+e1rsvtICJIbCBIcWJUcCBQdCCuIJt+pjvR00XPbbnF7nq2jqyhCE3GteewQaYFAcYFKeYFw+ehbZBLKhAbKozRQLTVQLTbp/SzXK5KkgN7JlutWPD+ayE6WPPmB7grjeRSJ0rrp8zuW0cU4LW0a49cts22iuCy8uO3KUFjUoOb3gQ+5+29kFv8ZcC/grWZ2e+AwcJWZ3QAwd/8vM/s+YNfdP1h1rEld0aE7AAihr7c8bTOppplNn29CoHuJGSKTqorSQD2pidYrqQYXIjcQJDgQLjlQXdg8IH+hhdn0920b7QmNLG0jXUXSGp7bUFlZ2SZUXCBIXpb77VBi6uwvhBChgeIoDVRXIVuulx1rK1BuqvZd9iwoapMS+rwpa3fU1QCIRVKUx1AS0ZZ1DBbZRY9hTct9dcsD63xZOnXM7AeB/wucDLzRzN7r7vczs1sAL3H3BwJ3Ax4NvM/M3htv+ovufiFRdbKXmtn7geuBx7i7xz2YvSmO1Fweb1/JpGSmS8YSWu4yE2ybMTXNdPp8K1IncxnybUuI0ECY1ETrhYlNep/765fsO1BwoLwQmSc6EFZYzY12NRWPvHMzqqpvE6BD6WsiK8tt60hLQs12RXWFo4+2MbNDO0GdAFQJTbSv/fs/VGygntxAseDk7TvkOE17JVtu36KAHNoBw9RHhe+Svrow7uLldNPnvsSlHe7+euD1OfM/DTww/vy3QG7IyN2vBx6VM//jwB3qpmccJfpQBh40s4h1vJXpMmNdZzgXmmU2dTOaWoJU0vg/S1VVszShQgPhUhOtGy426X3vb1NxjKICYUnD96qCZ5HsQP0Cb+n57zP6MrZqagNFmtoIyoF9NRGWhIYdIjSJmKy7gX8Ryb0fcg1CxQbqyQ0U58dlkpN3nAPbVx235vOmrBey5T43ZWyRkdFHrZm2LyibCsvUqqE5NsQ4M6NiUlcsqmY2zXBulj7f+gwZ1oX1vimpHUauITJQUZDOoY7QQD2pidavJzbpY6QJOl4DyVkeM6DQWiY8K/tqUZiue/1WGEmBtiu6lJLCY7SRlTQte3JrU+VrHSITGp1JqCM10f7DxQbqVyFbblfx+2orO3XSAt0XqEPkaKoMVWW/S1FoG12ZmrSIYnQlazJk6LnrN0pDhnihXUbUKNpTU2TWie8ugoVmf5v6YpM+Xppaxy4raNboxji04BsqPbnHUJWzTuhMUvLoqAvqLtqsrDMik+TnTaQGmolNQojgQHPJWW4fkMdXCU9VWkIITe+BY468je4QrEMAuqwC1qqMMuIyg1hlWjJjs42ox7qOMHfXmXAXGdi65WW5bcMMqdVb/QbUjdKsbttcbNLH3t9Hw+/ekeikaVqQbiNBm06vclJED+PmdNnovq3I2M68UXqaSA0czJ/qiHtRnthWclb21aHwHNh3zZcUegM/HH22Tenquk5dWtzH2ZvZOtEvPIex1qnt8y3RmEK/0EFd2ZaZ07pFpkuqunoO20fDqmll9CA6ZfRVYB9CkgaRj6b0Ndhnii4lpkuaCg2sPnfqig3k51l1I5Nl+Wao6Cz3FZiHN4madF1AritHm8yQDeP7ks6py4qoZpyl9gLMxisaVWxC/dSV/Xac4XUS+ekgw2orMV3cn02qnOXvp73UrO6vo+hNHlUF4B5kpymTEou+WIOw5DFWiUnTRmgSsvlIE7mB4vysSfXLkPy1rvDAMNXHDqRhwi+vxsY6o2BdS8qUX2JuO+MpIfTIptR7XWsm0dOPutMIUEcZWVcZ2FhF23ePdSY0q/vtUW6yhBaeRyQ9k2cgYcljChKTJv1b6CLtXclNQlme16adWd08uYn8rBxvTc/ErqRpKKZQzW4d0ZPNlRVjd28zOsdqyvjv8DRmkxOTMWUi63j71Mf37SOT6zJT61pi+pCCvoRm9Rj7haBexaaMOgXwbRafEYlKGeuUGNs51EsHANnfQh9yk6ZP0UnTRecaXeTtbYUohDE9x8fIkNW4+hSUsb6gFAeZ1JUys63PVIYKh6+lB5OeMsQ+Mru+MrnBJKBjRiE2VbQt0I9FhiYiJlMgEf4+ezXrQ27SVOVNbWUnoUm+2kfvgmNrD7EOuYLxfe8uWGfkZJNExYGFIjOiD8ZeB3cIKZx6GLnPzG+0Bf4OaNNL26iRRGws6Shm3901F/0u+opKheZjXUlPmq7y5zF3ub6JklHE2KptbZKgiHpM68rb+CVhXYwlQrXujHtdmee6MsV1FfD7rmIWQlcdG4jtoYvG9O3TcPC3s47xaKp+K32flyZ5YB8ClMfYCtGiW4aSEj2fpss4SsQbxlhEoy5Dv1Ea4gG17kxziMxyDCIjRFOS38zQUpOm6De1zkE36+Ql6zp3Xeen65Ij0ZwxR0O2RU7cYbGncWamg9rMFDK0iBQx9Bu0bX3DI4ERm0bfbU26oOp3t07ZSdM0Pxr6HI+5oFxGqIRN9futm6Gfp2L86JfUE2OVi6YMLSV5jOFBMIZMdgriMobzJDaLdbc16YLQ3+pQ0pOly9/tmK9L14zh2TQkU8jvp/DcFOFM6hdnZhsnCX0zRgkpYowPgLFlylPKgMd27sR2MHRbky5o+jsfiwTlMbb8YAr3QQhjO6/rZkrPxD7ZVTUzsS6mJBZNGaOQ5DHmB8AUM+cxn08h0oTeq1Ms7HaVd4xZirpCedZwTPEZJ8bNNEqeCWZbIQTrZioCUsSUHkpTzsSndJ6FaMtU25l0wRD51DYI1FiZ8nOpDnqGbS7TLsVuKFOXiyZMPZPZhIfB1K/BJBjLQJtpNF5Op3T1O9oEKarDJuShoj/0fCrG3dSb2dAJqEPUZmZSSd5oNjVz2bSH6qZep7UwRvnomnV+R4lTMH3/brdNlkS/6DmzXZjZw4DnAN8I3MXdLylY78nA2YAD7wMe6+7XmtkZwPOAGfAl4Cx3v9TMbgW8DLgRMAee4e4XVqVnC57U24UylM2TkSy6xjXZBiGZCn1eC4lSLaaYj2yTgE3x+oyaDX4OOIN0APB+4IeA3y1awcxuCfwccEd3/4qZvQZ4BHA+8DvAme7+ITP7GeCZwFnx/9e4+++Y2R2BC4HTqhKzuVe3AGUQ42TTBSQP3YuBbPBDSHRI3/eJZGlwlGduEcr3R427fwiiGlMV7ABfZWbHgBsAn052Adww/nxiwPzKg0wKZWbrZRslIxTdi4HooSQ2gSHvY4mUGBrl46Im7n65mf068EngK8Cb3f3N8eKzgQvN7CvANcBd4/nPAd5sZj8LfDVwn5Bj6e4cCZKG9SABqYkeYEIMzzb8DjdB2LbhOm0AtoHjFe7tNdrsJDNLt3U56u5Hkwkzewtws5ztznX3N1Tt3MxuDJwJ3Ab4PPAnZvYod/8j4MnAA939PWb2VOA3iATnkcD57v5/zOy7gT80s29299JvOK1fnpkK/RuOZKMD9EAVQkwN5Vsih00UjxFxlbufXrTQ3YOiIiXcB/iYu38GwMxeB3yPmb0J+DZ3f0+83quBv4o/Pw64f3z8d5nZccBJwJVlB1LuIVaQTKwZPcA3Dj18148f0xglQqwD5W+iBp8E7mpmNyCqZnYGcAnwOeBEM7u9u38E+D7gQ6ltzgDON7NvBI4DPlN1IJWkaqCC/hYi2ZgUetBuJ9t63SVx02Zb71vRLe6wu1hvb2Zm9oPA/wVOBt5oZu919/uZ2S2Al7h7UoXstcA/ArvAPxFVZds1s58E/tTM9ojk5ifiXT8F+L24S2cn6rLZq9IzrZKamYRCSDAmhh7YQvSDfltC7GNzlQ/Xhbu/Hnh9zvxPAw9MTT8beHaN7T8I3K1uegYpFZrZ84DvB64H/p1oEJ3PD5EWUYHEYWNRQUgIIcQ6kGiIPhmqpHoRcE4cavpV4Bzg6Z3sWYVvMTEkFUIIIbpE8rA9DFHNbGwMUvJP9TMN8G7gh4M2NJOsiM6QRIipoILJZuCL7RnBfmzoNyTE5jIGM/gJom7ZxIYiaRDrQIUVMXZ0jwrRA1s+ZIcDi2bjzGwMvclMyGA7ZnYuUQ8HryjZz+OBxwOcepMTe0hpt6jgLrYBFcqEEEIUsuWCIdZLbzJTNdiOmZ0FPBg4o6zbtXg00qMA3/F1p7hkQWwjkgchhBClSCDEljJUb2b3B54G3MPdvzxEGsR2IzkQQogtR4V/sQk47G55c7yh2sy8EDgCXGRmAO929ycMlBZRgAr8QmwZKtwJIbYFlXE2hqF6M7ttk+3MVMAWQpSgwrgQQkwXlfFEA8bQm5kQYmgkAUIIIdJILCaBA9ve67tkRogukRQIIYQIRcIgRGskM6I7VJAXQghRBxXmhRAtmZjMmArMQggh+kEFayHExHCH3UXhCCdbwcRkRgghJoQKx0II0R96wS2QzAgh6qDCuRBCiDwkFoPgDru7Q6diWCQzQrRBhXshhBBDIokQW45kZltQoVsIIYRYRSIgxOSZlsyYqVAuhBBC9IkK+EJMisXe0CkYlmnJjBBCCFEXFc6FEGJjkcwIIcRUUKFcCCG6RflqbczsecD3A9cD/w481t0/n7Pex4EvAgtg191Pj+d/DfBq4DTg48DD3f1zZnYi8EfArYgc5dfd/Q+q0iOZEUJ0jx4OQggh2qJnSSVRb2ZrH2fmIuAcd981s18FzgGeXrDuvdz9qsy8ZwB/7e7nmdkz4umnA08EPuju329mJwMfNrNXuPv1ZYmRzIjtQZmiEEIIUY2el6IEd39zavLdwA/X3MWZwD3jzy8D3kYkMw6cYGYGHA9cDVR2PD0tmTHTD0wIIYQQ00RlGLF5/ARRlbE8HHizmTnwu+5+NJ5/U3e/Iv78H8BN488vBC4APg2cAPyIu1d2bzAtmRFCCCHEdiMhEGKJA4tFo2pmJ5nZJanpoynZwMzeAtwsZ7tz3f0N8TrnEkVOXlFwjO9198vN7GuBi8zsX9397Svpd/dYdgDuB7wXuDfw9fE273D3a8q+iGRGCCGE2BYkAkKIiKuSBvl5uPt9yjY2s7OABwNnuHuuTbn75fH/K83s9cBdgLcD/2lmN3f3K8zs5sCV8SaPBc6L93epmX0M+Abg78vSIpkRQgixvahwL4SYMu5r7wDAzO4PPA24h7t/uWCdrwZm7v7F+PN9gefGiy8AHgOcF/9/Qzz/k8AZwDvM7KbAHYCPVqVHMiOEENuGCvBCCCGa80LgCFE1MIB3u/sTzOwWwEvc/YFE7WBeHy/fAf7Y3f8q3v484DVm9jjgE8DD4/m/BJxvZu8DDHh6Tk9oB5DMCCE2CxXUhRBCiN5w99sWzP808MD480eBbytY77NEEZi87e9bNz2SGbHZqGArhBBCiITZfOgUdIp74w4ANoZpyYy6ZhZCCCGEmA4bJg9ifExLZoQQQgghRDGSB7FlSGaEEEIIIUKRLIgR4Q67u5XjSm40khkhhBBCjBOJgxCiAsmMEEIIMUVU0BdCCMmMEEKILUGFfyHEhuHuqmY2dAKEEEIMjAr5QgghJopkRgghQAV6IYQQk0TjzAghthMV3oUQQggxcaYnMyqACSGEEEIIIZiizAghhBBCCCE0zgwwGzoBQgghhBBCCNEERWaEEEIIIcSk8B0VYUWE7gQhhBBCCNEKycUwuDuLxXZXM9OdJ4QQQggxYSQSYpvR3S+EEEII0QBJhBDDo1+hEEIIIdaGBECIDnHYPaZqZkIIIYTYACQKQoi+MbNfAs4E9oArgbPc/dM56/0VcFfgb939wan5rwBOB44Bfw/8lLsfM7OnAj8Wr7YDfCNwsrtfXZYe5XpCCCG2BhX2hRCbhLuze2yx7sM+z93/J4CZ/RzwLOAJeesBNwB+KjP/FcCj4s9/DJwN/I67Py/eBjP7fuDJVSIDkhkhhBAxKugLIYSowt2vSU1+NeAF6/21md0zZ/6FyWcz+3vglJzNHwm8MiQ9enIJIbYeFeKFEEKIcMzsl4EfB74A3KvhPg4BjwZ+PjP/BsD9gSeF7EdPcCG2GBXihRBCiOniwF6zcWZOMrNLUtNH3f1oMmFmbwFulrPdue7+Bnc/FzjXzM4hko5nN0jDbwNvd/d3ZOZ/P/B3IVXMQDIjhAr0QgghhNg2rnL304sWuvt9AvfzCuBCasqMmT0bOJmD7WkAHkFgFTOYmMy4mQqeQgghhBBCDISZ3c7d/y2ePBP415rbnw3cDzjD3fcyy04E7sF+BwGVyAyEEEIIIYSYIsP0Znaemd2BqGvmTxD3ZGZmpwNPcPez4+l3AN8AHG9mlwGPc/c3AS+Ot3uXmQG8zt2fG+/7B4E3u/t/hSZGMiOEEEIIIYQIwt0fWjD/EqJulpPpuxesV+gf7n4+cH6d9MzqrNw1ZvYUM3MzO2nIdAghhBBCCCGmx2CRGTM7Fbgv8Mmh0iCEEEIIIcRUcYdFs97MNoYhIzO/CTyNgoF2hBBCCCGEEKKMQSIzZnYmcLm7/3Pc8EcIIYQQQghRA3dncWx36GQMSm8yUzbYDvCLRFXMQvbzeODxAKd+7U06S58QQgghhBBi2vQmM0WD7ZjZtwC3AZKozCnAP5rZXdz9P3L2cxQ4CvAdd7iNqqQJIYQQQgghgAGqmbn7+4CvTabN7OPA6e5+1brTIoQQQgghxGQZZpyZUTFo18xCCCGEEEII0ZTBB81099OGToMQQgghhBBiegwuM0IIIYQQQoj6OLBYqJqZEEIIIYQQQkwOyYwQQgghhBBikqiamRBCCCGEEBMkGjRT1cyEEEIIIYQQYnIoMiOEEEIIIcQUcVjsKjIjhBBCCCGEEJNDkRkhhBBCCDEtZirCigjdCUIIIYQQYv1ISFoTdQCwO3QyBkV3kRBCCCGE6AYJilgzajMjhBBCCCHqM9s5+Cc2HjP7JTP7FzN7r5m92cxukbPOveLlyd+1ZvYDmXV+y8y+lJr+zdT6HzGzz4ekR3edEEIIIYTYR1IyGdydxWLtvZk9z93/J4CZ/RzwLOAJmXS9Ffj2eJ2vAS4F3pwsN7PTgRtntnlyavnPAncKSYwiM0IIIYQQm0he5CTkT4gS3P2a1ORXA16xyQ8Df+nuXwYwsznwPOBpJds8EnhlSHp0xwohhBBClKECvhArmNkvAz8OfAG4V8XqjwB+IzX9JOACd7/CzPL2fWvgNsDfhKRFv04hhBBCbB8SFLEJuLM4dqzJlieZ2SWp6aPufjSZMLO3ADfL2e5cd3+Du58LnGtm5xDJybPzDmJmNwe+BXhTPH0L4GHAPUvS9gjgte4eVH9Ov2QhhBBCbCYSFiGKuMrdTy9a6O73CdzPK4ALKZAZ4OHA6909Ma47AbcFLo2jMjcws0vd/bapbR4BPDHw+JIZIYQQQkwAiYkQB3B3dq9f7zgzZnY7d/+3ePJM4F9LVn8kcE4y4e5vJBXxMbMvpUXGzL6BqGOAd4WmZ9o5QxcZ2952DzQkhBBCrIXsM7vo+StpEWLsnGdmdwD2gE8Q92QW91D2BHc/O54+DTgV+H819v0I4FXuXtWpwJKJ5RjWfSZXZ38SHyGEECKfus9nSYsQk8TdH1ow/xLg7NT0x4FbVuzr+Mz0c+qmRzlJHaoyXsmOEEKITUXyIcT4cNhb/zgzo0I5U5fkZfQSHCGEEFNGEiOEGDHKofom/RCQ2AghhBBCCNEZVqN9zeCY2ReBDw+djg3jJOCqoROxYeic9oPOa/fonHaPzmn36Jz2g85rNbd295OHTkQZZvZXRNeyLle5+/27Ts8QTE1mLinrE1vUR+e0e3RO+0HntXt0TrtH57R7dE77QedVbAqzoRMghBBCCCGEEE2QzAghhBBCCCEmydRk5ujQCdhAdE67R+e0H3Reu0fntHt0TrtH57QfdF7FRjCpNjNCCCGEEEIIkTC1yIwQQgghhBBCACOVGTO7v5l92MwuNbNn5Cw/Ymavjpe/x8xOGyCZkyLgnP4PM/ugmf2Lmf21md16iHROiapzmlrvoWbmZqZeYyoIOadm9vD4Xv2Amf3xutM4RQJ+/7cys7ea2T/FecADh0jnVDCzl5rZlWb2/oLlZma/FZ/vfzGz71h3GqdIwHn9sfh8vs/M3mlm37buNE6NqnOaWu/OZrZrZj+8rrQJ0RWjkxkzmwMvAh4A3BF4pJndMbPa44DPufttgd8EfnW9qZwWgef0n4DT3f1bgdcCv7beVE6LwHOKmZ0A/DzwnvWmcHqEnFMzux1wDnA3d/8m4L+vO51TI/BefSbwGne/E/AI4LfXm8rJcT5QNj7DA4DbxX+PB35nDWnaBM6n/Lx+DLiHu38L8EuozUcI51N+TpM84leBN68jQUJ0zehkBrgLcKm7f9TdrwdeBZyZWedM4GXx59cCZ5iZrTGNU6PynLr7W939y/Hku4FT1pzGqRFyn0L0wP1V4Np1Jm6ihJzTnwRe5O6fA3D3K9ecxikScl4duGH8+UTg02tM3+Rw97cDV5escibwco94N3AjM7v5elI3XarOq7u/M/nto+dUEAH3KsDPAn8KKD8Vk2SMMnNL4FOp6cviebnruPsu8AXgJmtJ3TQJOadpHgf8Za8pmj6V5zSuWnKqu79xnQmbMCH36e2B25vZ35nZu81sI0Yv7pmQ8/oc4FFmdhlwIVHhRjSnbp4r6qPnVAeY2S2BH0TRQzFhdoZOgBgXZvYo4HTgHkOnZcqY2Qz4DeCsgZOyaewQVd25J9Fb2beb2be4++eHTNQG8EjgfHf/P2b23cAfmtk3u/ve0AkTIouZ3YtIZr536LRsAM8Hnu7ue6rgIqbKGGXmcuDU1PQp8by8dS4zsx2iahGfXU/yJknIOcXM7gOcS1Qn+bo1pW2qVJ3TE4BvBt4WPyBuBlxgZg9x90vWlsppEXKfXga8x92PAR8zs48Qyc3F60niJAk5r48jrlfv7u8ys+OAk1C1k6YE5bmiPmb2rcBLgAe4u5777TkdeFX8nDoJeKCZ7br7nw2aKiFqMMZqZhcDtzOz25jZYaLGqBdk1rkAeEz8+YeBv3ENmFNG5Tk1szsBvws8RO0Qgig9p+7+BXc/yd1Pc/fTiOp3S2TKCfnt/xlRVAYzO4mo2tlH15jGKRJyXj8JnAFgZt8IHAd8Zq2p3CwuAH487tXsrsAX3P2KoRM1dczsVsDrgEe7+0eGTs8m4O63ST2nXgv8jERGTI3RRWbcfdfMngS8CZgDL3X3D5jZc4FL3P0C4PeJqkFcStSw7RHDpXj8BJ7T5wHHA38Sv6H5pLs/ZLBEj5zAcypqEHhO3wTc18w+CCyAp+rtbDmB5/UpwO+Z2ZOJOgM4Sy+IijGzVxJJ9UlxO6NnA4cA3P3FRO2OHghcCnwZeOwwKZ0WAef1WUTtY387fk7turu6vC8h4JwKMXlMzyshhBBCCCHEFBljNTMhhBBCCCGEqEQyI4QQQgghhJgkkhkhhBBCCCHEJJHMCCGEEEIIISaJZEYIIYQQQggxSSQzQgixoZjZjczsZ4ZOhxBCCNEXkhkhhNhcbgRIZoQQQmwskhkhhNhczgO+3szea2bPGzoxQgghRNdo0EwhhNhQzOw04C/c/ZuHTosQQgjRB4rMCCGEEEIIISaJZEYIIYQQQggxSSQzQgixuXwROGHoRAghhBB9IZkRQogNxd0/C/ydmb1fHQAIIYTYRNQBgBBCCCGEEGKSKDIjhBBCCCGEmCSSGSGEEEIIIcQkkcwIIYQQQgghJolkRgghhBBCCDFJJDNCCCGEEEKISSKZEUIIIYQQQkwSyYwQQgghhBBikkhmhBBCCCGEEJPk/wf5h71KuSGMtQAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<Figure size 1080x360 with 2 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "import scipy.io\n",
    "from sklearn.model_selection import train_test_split\n",
    "\n",
    "# Load the .mat file\n",
    "mat_data = scipy.io.loadmat('NLS.mat')\n",
    "\n",
    "# Following is the code to plot the data u vs x and t. u is 256*100\n",
    "# matrix. Use first 75 columns for training and 25 for testing :)\n",
    "\n",
    "# Access the variables stored in the .mat file\n",
    "# The variable names in the .mat file become keys in the loaded dictionary\n",
    "x = mat_data['x']\n",
    "t = mat_data['tt']\n",
    "u1 = mat_data['uu']\n",
    "\n",
    "# Use the loaded variables as needed\n",
    "print(x.shape)\n",
    "print(t.shape)\n",
    "print(u.shape)\n",
    "\n",
    "X, T = np.meshgrid(x, t)\n",
    "# Define custom color levels\n",
    "c_levels = np.linspace(np.min(u1), np.max(u1), 100)\n",
    "\n",
    "# Plot the contour\n",
    "plt.figure()\n",
    "plt.figure(figsize=(15, 5))\n",
    "plt.contourf(T, X, u1.T, levels=c_levels, cmap='coolwarm')\n",
    "plt.xlabel('t')\n",
    "plt.ylabel('x')\n",
    "plt.title('Schrondinger-Equation')\n",
    "plt.colorbar()  # Add a colorbar for the contour levels\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "id": "aada34db",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "torch.Size([1, 1, 256])\n",
      "torch.Size([1, 40, 256])\n"
     ]
    }
   ],
   "source": [
    "print(test_tensor.shape)\n",
    "prediction_tensor = torch.zeros(1, 40, 256).float()\n",
    "print(prediction_tensor.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "id": "b6257d84",
   "metadata": {},
   "outputs": [],
   "source": [
    "with torch.no_grad():\n",
    "    hidden_pred = torch.zeros(1, batch_size, hidden_size)\n",
    "    prediction, _ = rnn(test_tensor, hidden_pred)\n",
    "    prediction = prediction.view(1, 1, 256).float()\n",
    "    prediction_tensor[:, 0, :] = prediction\n",
    "    for i in range(39):\n",
    "        hidden_pred = torch.zeros(1, batch_size, hidden_size)\n",
    "        prediction, _ = rnn(prediction, hidden_pred)\n",
    "        prediction = prediction.view(1, 1, 256).float()\n",
    "        prediction_tensor[:, i+1, :] = prediction"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "id": "0fe309e4",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(201, 256)\n"
     ]
    }
   ],
   "source": [
    "# true solution\n",
    "h_true = np.abs(u1)\n",
    "h_true = h_true.T\n",
    "print(h_true.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "id": "023483af",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(40, 256)\n"
     ]
    }
   ],
   "source": [
    "# exact\n",
    "u_test_full = h_true[161:201, :]\n",
    "print(u_test_full.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "id": "67721b1a",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "torch.Size([1, 40, 256])"
      ]
     },
     "execution_count": 20,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "\n",
    "k1 = (prediction_tensor - u_test_full)**2\n",
    "u_test_full_tensor = torch.tensor(u_test_full**2)\n",
    "prediction_tensor.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "id": "1687faef",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Relative Error Test:  37.16152236941 %\n"
     ]
    }
   ],
   "source": [
    "# Compute the relative L2 error norm (generalization error)\n",
    "relative_error_test = torch.mean(k1)/ torch.mean(u_test_full_tensor)\n",
    "\n",
    "print(\"Relative Error Test: \", relative_error_test.item(), \"%\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "id": "3520a361",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tensor(15.4579, dtype=torch.float64)\n"
     ]
    }
   ],
   "source": [
    "R_abs = torch.max(prediction_tensor-u_test_full)\n",
    "print(R_abs)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "445dda92",
   "metadata": {},
   "outputs": [],
   "source": [
    "import torch\n",
    "\n",
    "a = prediction_tensor\n",
    "b = u_test_full\n",
    "# Assuming 'a' is your predicted values (model's predictions) and 'b' is the true values (ground truth)\n",
    "# Make sure 'a' and 'b' are PyTorch tensors\n",
    "b = torch.tensor(b)\n",
    "# Calculate the mean of 'b'\n",
    "mean_b = torch.mean(b)\n",
    "\n",
    "# Calculate the Explained Variance Score\n",
    "numerator = torch.var(b - a)  # Variance of the differences between 'b' and 'a'\n",
    "denominator = torch.var(b)    # Variance of 'b'\n",
    "evs = 1 - numerator / denominator\n",
    "\n",
    "print(\"Explained Variance Score:\", evs.item())\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "c567031c",
   "metadata": {},
   "outputs": [],
   "source": [
    "R_mean = torch.mean(torch.abs(prediction_tensor - u_test_full))\n",
    "print(R_mean)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "2ce3f07b",
   "metadata": {},
   "outputs": [],
   "source": [
    "prediction_tensor = torch.squeeze(prediction_tensor)\n",
    "h = np.abs(u1)\n",
    "h.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "a2d5c775",
   "metadata": {},
   "outputs": [],
   "source": [
    "import matplotlib.pyplot as plt\n",
    "import numpy as np\n",
    "import torch\n",
    "\n",
    "# Create the figure and axis objects with reduced width\n",
    "fig, ax = plt.subplots(figsize=(5, 5))  # You can adjust the width (7 inches) and height (5 inches) as needed\n",
    "\n",
    "# # Make sure the font is Times Roman\n",
    "# plt.rcParams['font.family'] = 'Times New Roman'\n",
    "\n",
    "# # Perform the prediction\n",
    "# with torch.no_grad():\n",
    "#     prediction = lem(test_tensor)\n",
    "\n",
    "final_time_output = prediction_tensor[-38, :]\n",
    "final_out = final_time_output.detach().numpy().reshape(-1, 1)\n",
    "final_true = h[:, -38].reshape(-1, 1)\n",
    "print(final_out.shape)\n",
    "print(final_true.shape)\n",
    "\n",
    "# Plot the data with red and blue lines, one with dotted and one with solid style\n",
    "ax.plot(x.T, final_out, color='red', linestyle='dotted', linewidth=12, label='Prediction')\n",
    "ax.plot(x.T, final_true, color='blue', linestyle='solid', linewidth=7, label='True')\n",
    "\n",
    "# Set the axis labels with bold font weight\n",
    "ax.set_xlabel(r\"${x}$\", fontsize=26, color='black', fontdict={'weight': 'bold'})\n",
    "ax.set_ylabel(r\"${|u(x, t)|}$\", fontsize=26, color='black', fontdict={'weight': 'bold'})\n",
    "\n",
    "# Set the title with bold font weight\n",
    "ax.set_title(r\"${t = 1.28}$\", fontsize=26, color='black', fontweight='bold')\n",
    "\n",
    "# Set the number of ticks for x-axis and y-axis to 3\n",
    "ax.set_xticks([-5, 0, 5])\n",
    "ax.set_yticks([0, 2, 4])\n",
    "\n",
    "# Set tick labels fontweight to bold and increase font size\n",
    "ax.tick_params(axis='both', which='major', labelsize=20, width=2, length=10)\n",
    "\n",
    "# # Set the fontweight for tick labels to bold\n",
    "# for tick in ax.get_xticklabels() + ax.get_yticklabels():\n",
    "#     tick.set_weight('bold')\n",
    "\n",
    "# Set the spines linewidth to bold\n",
    "ax.spines['top'].set_linewidth(2)\n",
    "ax.spines['right'].set_linewidth(2)\n",
    "ax.spines['bottom'].set_linewidth(2)\n",
    "ax.spines['left'].set_linewidth(2)\n",
    "\n",
    "\n",
    "# Increase font size for x and y axis numbers\n",
    "ax.tick_params(axis='both', which='major', labelsize=24)\n",
    "\n",
    "# Set the legend\n",
    "# ax.legend()\n",
    "\n",
    "plt.savefig('RNN_1.28_20.pdf', dpi=500, bbox_inches=\"tight\")\n",
    "\n",
    "# Show the plot\n",
    "plt.show()\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "18a30486",
   "metadata": {},
   "outputs": [],
   "source": [
    "import matplotlib.pyplot as plt\n",
    "import numpy as np\n",
    "import torch\n",
    "\n",
    "# Create the figure and axis objects with reduced width\n",
    "fig, ax = plt.subplots(figsize=(5, 5))  # You can adjust the width (7 inches) and height (5 inches) as needed\n",
    "\n",
    "# # Make sure the font is Times Roman\n",
    "# plt.rcParams['font.family'] = 'Times New Roman'\n",
    "\n",
    "# # Perform the prediction\n",
    "# with torch.no_grad():\n",
    "#     prediction = lem(test_tensor)\n",
    "\n",
    "\n",
    "final_time_output = prediction_tensor[-3, :]\n",
    "final_out = final_time_output.detach().numpy().reshape(-1, 1)\n",
    "final_true = h[:, -3].reshape(-1, 1)\n",
    "print(final_out.shape)\n",
    "print(final_true.shape)\n",
    "\n",
    "# Plot the data with red and blue lines, one with dotted and one with solid style\n",
    "ax.plot(x.T, final_out, color='red', linestyle='dotted', linewidth=12, label='Prediction')\n",
    "ax.plot(x.T, final_true, color='blue', linestyle='solid', linewidth=7, label='True')\n",
    "\n",
    "# Set the axis labels with bold font weight\n",
    "ax.set_xlabel(r\"${x}$\", fontsize=26, color='black', fontdict={'weight': 'bold'})\n",
    "ax.set_ylabel(r\"${u(x, t)}$\", fontsize=26, color='black', fontdict={'weight': 'bold'})\n",
    "\n",
    "# Set the title with bold font weight\n",
    "ax.set_title(r\"${t = 1.5}$\", fontsize=26, color='black', fontweight='bold')\n",
    "\n",
    "# Set the number of ticks for x-axis and y-axis to 3\n",
    "ax.set_xticks([-5, 0, 5])\n",
    "ax.set_yticks([0, 2, 4])\n",
    "\n",
    "# Set tick labels fontweight to bold and increase font size\n",
    "ax.tick_params(axis='both', which='major', labelsize=20, width=2, length=10)\n",
    "\n",
    "# # Set the fontweight for tick labels to bold\n",
    "# for tick in ax.get_xticklabels() + ax.get_yticklabels():\n",
    "#     tick.set_weight('bold')\n",
    "\n",
    "# Set the spines linewidth to bold\n",
    "ax.spines['top'].set_linewidth(2)\n",
    "ax.spines['right'].set_linewidth(2)\n",
    "ax.spines['bottom'].set_linewidth(2)\n",
    "ax.spines['left'].set_linewidth(2)\n",
    "\n",
    "\n",
    "# Increase font size for x and y axis numbers\n",
    "ax.tick_params(axis='both', which='major', labelsize=24)\n",
    "\n",
    "# Set the legend\n",
    "# ax.legend()\n",
    "\n",
    "plt.savefig('RNN_1.5_20.pdf', dpi=500, bbox_inches=\"tight\")\n",
    "\n",
    "# Show the plot\n",
    "plt.show()\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "2521282d",
   "metadata": {},
   "outputs": [],
   "source": [
    "conc_u = torch.squeeze(input_tensor)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "1374d9aa",
   "metadata": {},
   "outputs": [],
   "source": [
    "concatenated_tensor = torch.cat((conc_u, prediction_tensor), dim=0)\n",
    "\n",
    "t1 = np.linspace(0, 1.5707 , 200)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "c81d0bb4",
   "metadata": {},
   "outputs": [],
   "source": [
    "import torch\n",
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "from matplotlib.ticker import FixedLocator\n",
    "\n",
    "# Assuming you have defined concatenated_tensor as a PyTorch tensor\n",
    "# concatenated_tensor = torch.cat((tensor1, tensor2), dim=0)\n",
    "\n",
    "# Convert concatenated_tensor to a NumPy array\n",
    "concatenated_array = concatenated_tensor.numpy()\n",
    "\n",
    "# Define custom color levels\n",
    "x = np.linspace(0, 1, concatenated_array.shape[1])  # Replace 0 and 1 with your actual x range\n",
    "t = np.linspace(0, 1, concatenated_array.shape[0])  # Replace 0 and 1 with your actual t range\n",
    "X, T = np.meshgrid(x, t1)\n",
    "\n",
    "# Define custom color levels using the minimum and maximum from the NumPy array\n",
    "c_levels = np.linspace(np.min(concatenated_array), np.max(concatenated_array), 400)\n",
    "\n",
    "# Plot the contour with interpolated data\n",
    "plt.figure(figsize=(20, 5))\n",
    "plt.pcolormesh(T, X, concatenated_array, shading='auto', cmap='twilight')\n",
    "\n",
    "# Set the fontweight for axis labels to regular (not bold)\n",
    "plt.xlabel(\"$t$\", fontsize=26)\n",
    "plt.ylabel(\"$x$\", fontsize=26)\n",
    "plt.title(\"$|u(x, t)|$\", fontsize=26)\n",
    "\n",
    "# Set tick labels fontweight to regular (not bold) and increase font size\n",
    "plt.tick_params(axis='both', which='major', labelsize=20, width=3, length=10)\n",
    "\n",
    "# Set the fontweight for tick labels to regular (not bold)\n",
    "for tick in plt.gca().get_xticklabels() + plt.gca().get_yticklabels():\n",
    "    tick.set_weight('normal')\n",
    "\n",
    "# Set the number of ticks for x-axis and y-axis to 5\n",
    "num_ticks = 5\n",
    "x_ticks = np.linspace(np.min(T), np.max(T), num_ticks)\n",
    "y_ticks = np.linspace(np.min(X), np.max(X), num_ticks)\n",
    "\n",
    "plt.gca().xaxis.set_major_locator(FixedLocator(x_ticks))\n",
    "plt.gca().yaxis.set_major_locator(FixedLocator(y_ticks))\n",
    "\n",
    "cbar1 = plt.colorbar()\n",
    "# Set the number of ticks for the color bar with uniformly distributed numbers\n",
    "num_ticks = 5\n",
    "c_ticks = np.linspace(np.min(concatenated_array), np.max(concatenated_array), num_ticks)\n",
    "cbar1.set_ticks(c_ticks)\n",
    "\n",
    "# Set the fontweight and fontsize for color bar tick labels\n",
    "for t in cbar1.ax.get_yticklabels():\n",
    "    t.set_weight('normal')\n",
    "    t.set_fontsize(26)  # Increase the font size for color bar tick labels\n",
    "\n",
    "# Increase the size of numbers on axis and color bar\n",
    "plt.xticks(fontsize=26)\n",
    "plt.yticks(fontsize=26)\n",
    "\n",
    "# Increase the tick size and width of the color bar\n",
    "cbar1.ax.tick_params(axis='both', which='major', labelsize=30, width=3,  length=10)\n",
    "\n",
    "# Add a dotted line at t = 0.8\n",
    "plt.axvline(x=1.26449, color='black', linestyle='dotted', linewidth=5)\n",
    "\n",
    "#plt.savefig('Contour_LEM_20.pdf', dpi=500, bbox_inches=\"tight\")\n",
    "plt.savefig('contour_RNN_20.jpeg', dpi=500, bbox_inches=\"tight\")\n",
    "# Show the plot\n",
    "plt.show()\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "d6d77eff",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "4cff7a40",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "pytorch",
   "language": "python",
   "name": "pytorch"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.10"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
