{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "5a996f09",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Import libraries\n",
    "\n",
    "import torch\n",
    "import torch.nn as nn\n",
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "import scipy.io"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "21e06a8a",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Define the LSTM model\n",
    "class LSTM(nn.Module):\n",
    "    \"\"\"Single-layer LSTM followed by a per-step linear projection.\n",
    "\n",
    "    Maps a sequence with `input_size` features per step to a sequence\n",
    "    with `output_size` features per step (batch-first layout).\n",
    "    \"\"\"\n",
    "    def __init__(self, input_size, hidden_size, output_size):\n",
    "        super(LSTM, self).__init__()\n",
    "\n",
    "        self.hidden_size = hidden_size\n",
    "\n",
    "        # batch_first=True: tensors are shaped (batch, seq_len, feature)\n",
    "        self.lstm = nn.LSTM(input_size, hidden_size, batch_first=True)\n",
    "        self.fc = nn.Linear(hidden_size, output_size)\n",
    "\n",
    "    def forward(self, x, hidden):\n",
    "        \"\"\"Run `x` through the LSTM and project every time step.\n",
    "\n",
    "        x: (batch, seq_len, input_size); hidden: (h0, c0) tuple, or None\n",
    "        to let nn.LSTM zero-initialize the state.\n",
    "        Returns (output, hidden) with output (batch, seq_len, output_size).\n",
    "        \"\"\"\n",
    "        output, hidden = self.lstm(x, hidden)\n",
    "        # nn.Linear is applied independently to each time step\n",
    "        output = self.fc(output)\n",
    "        return output, hidden\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "7b7a22b4",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Toy problem configuration\n",
    "input_size = 256  # features per time step (columns of the dataset)\n",
    "hidden_size = 32  # LSTM hidden units\n",
    "output_size = 256  # predicted features per step (same width as the input)\n",
    "sequence_length = 160  # number of time steps (rows) used for training\n",
    "batch_size = 1\n",
    "num_epochs = 20000"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "b6f89e2d",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Load the MATLAB .mat files via scipy.io\n",
    "# NOTE(review): v_data is loaded but never used in the visible cells -- confirm it is needed\n",
    "v_data = scipy.io.loadmat('v.mat')\n",
    "h_data = scipy.io.loadmat('h.mat')\n",
    "x_data = scipy.io.loadmat('x.mat')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "4189a364",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Pull the arrays out of the loadmat dicts (keys are the MATLAB variable names)\n",
    "x = x_data['X']\n",
    "u = h_data['h']  # presumably rows index time steps (later cells slice rows as steps) -- confirm"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "6b9bfc35",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "<torch._C.Generator at 0x7f40a00bf790>"
      ]
     },
     "execution_count": 6,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Set random seed for reproducibility\n",
    "# (the returned torch._C.Generator is echoed as this cell's output)\n",
    "torch.manual_seed(42)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "e49c1493",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "test data shape (256,)\n",
      "input data shape (160, 256)\n",
      "Target data shape (160, 256)\n"
     ]
    }
   ],
   "source": [
    "# Next-step prediction setup: the target is the input shifted one step ahead\n",
    "input_data = u[0:160,:]\n",
    "target_data = u[1:161, :]\n",
    "\n",
    "# Seed row for test-time prediction.\n",
    "# NOTE(review): u[160] is also the LAST training target row, so the test seed\n",
    "# was seen during training -- confirm this overlap is intended.\n",
    "test_data = u[160, :]\n",
    "#test_target = u[161:201, :]\n",
    "\n",
    "print(\"test data shape\", test_data.shape)\n",
    "#print(\"test target shape\", test_target.shape)\n",
    "\n",
    "print(\"input data shape\",input_data.shape)\n",
    "print(\"Target data shape\",target_data.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "7f382fd7",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "input tensor shape torch.Size([1, 160, 256])\n",
      "Target tensor shape torch.Size([1, 160, 256])\n"
     ]
    }
   ],
   "source": [
    "# Convert training arrays to float32 tensors shaped\n",
    "# (batch_size, sequence_length, feature) to match the model's batch_first=True\n",
    "input_tensor = torch.tensor(input_data).view(batch_size, sequence_length, input_size).float()\n",
    "target_tensor = torch.tensor(target_data).view(batch_size, sequence_length, output_size).float()\n",
    "\n",
    "print(\"input tensor shape\",input_tensor.shape)\n",
    "print(\"Target tensor shape\",target_tensor.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "id": "040679f2",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Convert the single test row to a tensor of shape (batch_size, 1, input_size)\n",
    "# i.e. a one-step sequence to seed prediction\n",
    "test_tensor = torch.tensor(test_data).view(batch_size, 1, input_size).float()\n",
    "#test_target_tensor = torch.tensor(test_target).view(batch_size, 40, output_size).float()\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b5dc1b21",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch: 10/20000, Loss: 0.17706659\n",
      "Epoch: 20/20000, Loss: 0.04944223\n",
      "Epoch: 30/20000, Loss: 0.03826120\n",
      "Epoch: 40/20000, Loss: 0.03608983\n",
      "Epoch: 50/20000, Loss: 0.03149891\n",
      "Epoch: 60/20000, Loss: 0.03100103\n",
      "Epoch: 70/20000, Loss: 0.03076473\n",
      "Epoch: 80/20000, Loss: 0.03058243\n",
      "Epoch: 90/20000, Loss: 0.03055021\n",
      "Epoch: 100/20000, Loss: 0.03016698\n",
      "Epoch: 110/20000, Loss: 0.02829106\n",
      "Epoch: 120/20000, Loss: 0.02481548\n",
      "Epoch: 130/20000, Loss: 0.02091661\n",
      "Epoch: 140/20000, Loss: 0.01758108\n",
      "Epoch: 150/20000, Loss: 0.01504118\n",
      "Epoch: 160/20000, Loss: 0.01308300\n",
      "Epoch: 170/20000, Loss: 0.01153052\n",
      "Epoch: 180/20000, Loss: 0.01026766\n",
      "Epoch: 190/20000, Loss: 0.00921789\n",
      "Epoch: 200/20000, Loss: 0.00832881\n",
      "Epoch: 210/20000, Loss: 0.00756419\n",
      "Epoch: 220/20000, Loss: 0.00689842\n",
      "Epoch: 230/20000, Loss: 0.00631265\n",
      "Epoch: 240/20000, Loss: 0.00579174\n",
      "Epoch: 250/20000, Loss: 0.00531701\n",
      "Epoch: 260/20000, Loss: 0.00481365\n",
      "Epoch: 270/20000, Loss: 0.00390480\n",
      "Epoch: 280/20000, Loss: 0.00243327\n",
      "Epoch: 290/20000, Loss: 0.00137593\n",
      "Epoch: 300/20000, Loss: 0.00096824\n",
      "Epoch: 310/20000, Loss: 0.00082747\n",
      "Epoch: 320/20000, Loss: 0.00076151\n",
      "Epoch: 330/20000, Loss: 0.00076168\n",
      "Epoch: 340/20000, Loss: 0.00076597\n",
      "Epoch: 350/20000, Loss: 0.00071739\n",
      "Epoch: 360/20000, Loss: 0.00068018\n",
      "Epoch: 370/20000, Loss: 0.00065403\n",
      "Epoch: 380/20000, Loss: 0.00063759\n",
      "Epoch: 390/20000, Loss: 0.00062010\n",
      "Epoch: 400/20000, Loss: 0.00060325\n",
      "Epoch: 410/20000, Loss: 0.00058731\n",
      "Epoch: 420/20000, Loss: 0.00058422\n",
      "Epoch: 430/20000, Loss: 0.00059469\n",
      "Epoch: 440/20000, Loss: 0.00056611\n",
      "Epoch: 450/20000, Loss: 0.00053405\n",
      "Epoch: 460/20000, Loss: 0.00051631\n",
      "Epoch: 470/20000, Loss: 0.00050400\n",
      "Epoch: 480/20000, Loss: 0.00049114\n",
      "Epoch: 490/20000, Loss: 0.00047944\n",
      "Epoch: 500/20000, Loss: 0.00046827\n",
      "Epoch: 510/20000, Loss: 0.00045764\n",
      "Epoch: 520/20000, Loss: 0.00044765\n",
      "Epoch: 530/20000, Loss: 0.00043814\n",
      "Epoch: 540/20000, Loss: 0.00042914\n",
      "Epoch: 550/20000, Loss: 0.00042074\n",
      "Epoch: 560/20000, Loss: 0.00041273\n",
      "Epoch: 570/20000, Loss: 0.00040517\n",
      "Epoch: 580/20000, Loss: 0.00039849\n",
      "Epoch: 590/20000, Loss: 0.00046231\n",
      "Epoch: 600/20000, Loss: 0.00038595\n",
      "Epoch: 610/20000, Loss: 0.00038030\n",
      "Epoch: 620/20000, Loss: 0.00037552\n",
      "Epoch: 630/20000, Loss: 0.00036942\n",
      "Epoch: 640/20000, Loss: 0.00036375\n",
      "Epoch: 650/20000, Loss: 0.00035908\n",
      "Epoch: 660/20000, Loss: 0.00035463\n",
      "Epoch: 670/20000, Loss: 0.00035041\n",
      "Epoch: 680/20000, Loss: 0.00034671\n",
      "Epoch: 690/20000, Loss: 0.00034283\n",
      "Epoch: 700/20000, Loss: 0.00034086\n",
      "Epoch: 710/20000, Loss: 0.00047228\n",
      "Epoch: 720/20000, Loss: 0.00036984\n",
      "Epoch: 730/20000, Loss: 0.00033221\n",
      "Epoch: 740/20000, Loss: 0.00032774\n",
      "Epoch: 750/20000, Loss: 0.00032484\n",
      "Epoch: 760/20000, Loss: 0.00032253\n",
      "Epoch: 770/20000, Loss: 0.00032008\n",
      "Epoch: 780/20000, Loss: 0.00031765\n",
      "Epoch: 790/20000, Loss: 0.00031635\n",
      "Epoch: 800/20000, Loss: 0.00031370\n",
      "Epoch: 810/20000, Loss: 0.00031185\n",
      "Epoch: 820/20000, Loss: 0.00033684\n",
      "Epoch: 830/20000, Loss: 0.00035166\n",
      "Epoch: 840/20000, Loss: 0.00034317\n",
      "Epoch: 850/20000, Loss: 0.00031236\n",
      "Epoch: 860/20000, Loss: 0.00030822\n",
      "Epoch: 870/20000, Loss: 0.00030293\n",
      "Epoch: 880/20000, Loss: 0.00030184\n",
      "Epoch: 890/20000, Loss: 0.00030012\n",
      "Epoch: 900/20000, Loss: 0.00029862\n",
      "Epoch: 910/20000, Loss: 0.00029766\n",
      "Epoch: 920/20000, Loss: 0.00029607\n",
      "Epoch: 930/20000, Loss: 0.00029595\n",
      "Epoch: 940/20000, Loss: 0.00029461\n",
      "Epoch: 950/20000, Loss: 0.00029277\n",
      "Epoch: 960/20000, Loss: 0.00031329\n",
      "Epoch: 970/20000, Loss: 0.00032998\n",
      "Epoch: 980/20000, Loss: 0.00032362\n",
      "Epoch: 990/20000, Loss: 0.00029408\n",
      "Epoch: 1000/20000, Loss: 0.00029421\n",
      "Epoch: 1010/20000, Loss: 0.00028935\n",
      "Epoch: 1020/20000, Loss: 0.00028771\n",
      "Epoch: 1030/20000, Loss: 0.00028679\n",
      "Epoch: 1040/20000, Loss: 0.00028629\n",
      "Epoch: 1050/20000, Loss: 0.00028532\n",
      "Epoch: 1060/20000, Loss: 0.00028426\n",
      "Epoch: 1070/20000, Loss: 0.00028427\n",
      "Epoch: 1080/20000, Loss: 0.00028344\n",
      "Epoch: 1090/20000, Loss: 0.00028628\n",
      "Epoch: 1100/20000, Loss: 0.00029048\n",
      "Epoch: 1110/20000, Loss: 0.00030561\n",
      "Epoch: 1120/20000, Loss: 0.00028136\n",
      "Epoch: 1130/20000, Loss: 0.00028069\n",
      "Epoch: 1140/20000, Loss: 0.00028079\n",
      "Epoch: 1150/20000, Loss: 0.00027981\n",
      "Epoch: 1160/20000, Loss: 0.00027868\n",
      "Epoch: 1170/20000, Loss: 0.00027795\n",
      "Epoch: 1180/20000, Loss: 0.00027735\n",
      "Epoch: 1190/20000, Loss: 0.00027745\n",
      "Epoch: 1200/20000, Loss: 0.00035472\n",
      "Epoch: 1210/20000, Loss: 0.00027764\n",
      "Epoch: 1220/20000, Loss: 0.00028000\n",
      "Epoch: 1230/20000, Loss: 0.00027819\n",
      "Epoch: 1240/20000, Loss: 0.00027545\n",
      "Epoch: 1250/20000, Loss: 0.00027451\n",
      "Epoch: 1260/20000, Loss: 0.00027443\n",
      "Epoch: 1270/20000, Loss: 0.00028070\n",
      "Epoch: 1280/20000, Loss: 0.00029826\n",
      "Epoch: 1290/20000, Loss: 0.00028472\n",
      "Epoch: 1300/20000, Loss: 0.00027660\n",
      "Epoch: 1310/20000, Loss: 0.00027107\n",
      "Epoch: 1320/20000, Loss: 0.00028057\n",
      "Epoch: 1330/20000, Loss: 0.00028000\n",
      "Epoch: 1340/20000, Loss: 0.00031212\n",
      "Epoch: 1350/20000, Loss: 0.00028472\n",
      "Epoch: 1360/20000, Loss: 0.00028295\n",
      "Epoch: 1370/20000, Loss: 0.00027369\n",
      "Epoch: 1380/20000, Loss: 0.00026845\n",
      "Epoch: 1390/20000, Loss: 0.00026970\n",
      "Epoch: 1400/20000, Loss: 0.00031917\n",
      "Epoch: 1410/20000, Loss: 0.00029907\n",
      "Epoch: 1420/20000, Loss: 0.00027627\n",
      "Epoch: 1430/20000, Loss: 0.00027035\n",
      "Epoch: 1440/20000, Loss: 0.00026584\n",
      "Epoch: 1450/20000, Loss: 0.00026682\n",
      "Epoch: 1460/20000, Loss: 0.00030607\n",
      "Epoch: 1470/20000, Loss: 0.00028958\n",
      "Epoch: 1480/20000, Loss: 0.00027066\n",
      "Epoch: 1490/20000, Loss: 0.00026729\n",
      "Epoch: 1500/20000, Loss: 0.00026388\n",
      "Epoch: 1510/20000, Loss: 0.00026516\n",
      "Epoch: 1520/20000, Loss: 0.00028757\n",
      "Epoch: 1530/20000, Loss: 0.00026420\n",
      "Epoch: 1540/20000, Loss: 0.00028847\n",
      "Epoch: 1550/20000, Loss: 0.00027045\n",
      "Epoch: 1560/20000, Loss: 0.00026450\n",
      "Epoch: 1570/20000, Loss: 0.00026081\n",
      "Epoch: 1580/20000, Loss: 0.00026682\n",
      "Epoch: 1590/20000, Loss: 0.00032174\n",
      "Epoch: 1600/20000, Loss: 0.00029336\n",
      "Epoch: 1610/20000, Loss: 0.00026888\n",
      "Epoch: 1620/20000, Loss: 0.00026384\n",
      "Epoch: 1630/20000, Loss: 0.00025980\n",
      "Epoch: 1640/20000, Loss: 0.00026270\n",
      "Epoch: 1650/20000, Loss: 0.00028895\n",
      "Epoch: 1660/20000, Loss: 0.00026043\n",
      "Epoch: 1670/20000, Loss: 0.00027107\n",
      "Epoch: 1680/20000, Loss: 0.00026522\n",
      "Epoch: 1690/20000, Loss: 0.00025791\n",
      "Epoch: 1700/20000, Loss: 0.00025924\n",
      "Epoch: 1710/20000, Loss: 0.00027624\n",
      "Epoch: 1720/20000, Loss: 0.00026815\n",
      "Epoch: 1730/20000, Loss: 0.00027505\n",
      "Epoch: 1740/20000, Loss: 0.00026164\n",
      "Epoch: 1750/20000, Loss: 0.00025683\n",
      "Epoch: 1760/20000, Loss: 0.00025401\n",
      "Epoch: 1770/20000, Loss: 0.00025333\n",
      "Epoch: 1780/20000, Loss: 0.00027255\n",
      "Epoch: 1790/20000, Loss: 0.00028909\n",
      "Epoch: 1800/20000, Loss: 0.00025604\n",
      "Epoch: 1810/20000, Loss: 0.00025594\n",
      "Epoch: 1820/20000, Loss: 0.00025334\n",
      "Epoch: 1830/20000, Loss: 0.00025216\n",
      "Epoch: 1840/20000, Loss: 0.00025154\n",
      "Epoch: 1850/20000, Loss: 0.00025086\n",
      "Epoch: 1860/20000, Loss: 0.00025104\n",
      "Epoch: 1870/20000, Loss: 0.00025153\n",
      "Epoch: 1880/20000, Loss: 0.00025289\n",
      "Epoch: 1890/20000, Loss: 0.00025103\n",
      "Epoch: 1900/20000, Loss: 0.00025010\n",
      "Epoch: 1910/20000, Loss: 0.00033039\n",
      "Epoch: 1920/20000, Loss: 0.00035289\n",
      "Epoch: 1930/20000, Loss: 0.00028277\n",
      "Epoch: 1940/20000, Loss: 0.00025769\n",
      "Epoch: 1950/20000, Loss: 0.00025020\n",
      "Epoch: 1960/20000, Loss: 0.00024785\n",
      "Epoch: 1970/20000, Loss: 0.00024674\n",
      "Epoch: 1980/20000, Loss: 0.00024634\n",
      "Epoch: 1990/20000, Loss: 0.00024592\n",
      "Epoch: 2000/20000, Loss: 0.00024500\n",
      "Epoch: 2010/20000, Loss: 0.00024591\n",
      "Epoch: 2020/20000, Loss: 0.00024711\n",
      "Epoch: 2030/20000, Loss: 0.00024481\n",
      "Epoch: 2040/20000, Loss: 0.00038422\n",
      "Epoch: 2050/20000, Loss: 0.00027005\n",
      "Epoch: 2060/20000, Loss: 0.00028618\n",
      "Epoch: 2070/20000, Loss: 0.00024617\n",
      "Epoch: 2080/20000, Loss: 0.00024853\n",
      "Epoch: 2090/20000, Loss: 0.00024232\n",
      "Epoch: 2100/20000, Loss: 0.00024107\n",
      "Epoch: 2110/20000, Loss: 0.00024082\n",
      "Epoch: 2120/20000, Loss: 0.00024050\n",
      "Epoch: 2130/20000, Loss: 0.00023961\n",
      "Epoch: 2140/20000, Loss: 0.00023942\n",
      "Epoch: 2150/20000, Loss: 0.00023949\n",
      "Epoch: 2160/20000, Loss: 0.00023993\n",
      "Epoch: 2170/20000, Loss: 0.00023967\n",
      "Epoch: 2180/20000, Loss: 0.00023951\n",
      "Epoch: 2190/20000, Loss: 0.00023859\n",
      "Epoch: 2200/20000, Loss: 0.00025706\n",
      "Epoch: 2210/20000, Loss: 0.00029567\n",
      "Epoch: 2220/20000, Loss: 0.00031561\n",
      "Epoch: 2230/20000, Loss: 0.00024874\n",
      "Epoch: 2240/20000, Loss: 0.00023824\n",
      "Epoch: 2250/20000, Loss: 0.00023797\n",
      "Epoch: 2260/20000, Loss: 0.00023468\n",
      "Epoch: 2270/20000, Loss: 0.00023439\n",
      "Epoch: 2280/20000, Loss: 0.00023394\n",
      "Epoch: 2290/20000, Loss: 0.00023308\n",
      "Epoch: 2300/20000, Loss: 0.00023199\n",
      "Epoch: 2310/20000, Loss: 0.00023502\n",
      "Epoch: 2320/20000, Loss: 0.00023493\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch: 2330/20000, Loss: 0.00023256\n",
      "Epoch: 2340/20000, Loss: 0.00023050\n",
      "Epoch: 2350/20000, Loss: 0.00023062\n",
      "Epoch: 2360/20000, Loss: 0.00023111\n",
      "Epoch: 2370/20000, Loss: 0.00023902\n",
      "Epoch: 2380/20000, Loss: 0.00041539\n",
      "Epoch: 2390/20000, Loss: 0.00025054\n",
      "Epoch: 2400/20000, Loss: 0.00025071\n",
      "Epoch: 2410/20000, Loss: 0.00023557\n",
      "Epoch: 2420/20000, Loss: 0.00022752\n",
      "Epoch: 2430/20000, Loss: 0.00022690\n",
      "Epoch: 2440/20000, Loss: 0.00022570\n",
      "Epoch: 2450/20000, Loss: 0.00022543\n",
      "Epoch: 2460/20000, Loss: 0.00022557\n",
      "Epoch: 2470/20000, Loss: 0.00022642\n",
      "Epoch: 2480/20000, Loss: 0.00022697\n",
      "Epoch: 2490/20000, Loss: 0.00022783\n",
      "Epoch: 2500/20000, Loss: 0.00022640\n",
      "Epoch: 2510/20000, Loss: 0.00022591\n",
      "Epoch: 2520/20000, Loss: 0.00022445\n",
      "Epoch: 2530/20000, Loss: 0.00022310\n",
      "Epoch: 2540/20000, Loss: 0.00042690\n",
      "Epoch: 2550/20000, Loss: 0.00026674\n",
      "Epoch: 2560/20000, Loss: 0.00022820\n",
      "Epoch: 2570/20000, Loss: 0.00022278\n",
      "Epoch: 2580/20000, Loss: 0.00022070\n",
      "Epoch: 2590/20000, Loss: 0.00021975\n",
      "Epoch: 2600/20000, Loss: 0.00021880\n",
      "Epoch: 2610/20000, Loss: 0.00021953\n",
      "Epoch: 2620/20000, Loss: 0.00021963\n",
      "Epoch: 2630/20000, Loss: 0.00022034\n",
      "Epoch: 2640/20000, Loss: 0.00021948\n",
      "Epoch: 2650/20000, Loss: 0.00021896\n",
      "Epoch: 2660/20000, Loss: 0.00021754\n",
      "Epoch: 2670/20000, Loss: 0.00021741\n",
      "Epoch: 2680/20000, Loss: 0.00021828\n",
      "Epoch: 2690/20000, Loss: 0.00021793\n",
      "Epoch: 2700/20000, Loss: 0.00028040\n",
      "Epoch: 2710/20000, Loss: 0.00042961\n",
      "Epoch: 2720/20000, Loss: 0.00027687\n",
      "Epoch: 2730/20000, Loss: 0.00022185\n",
      "Epoch: 2740/20000, Loss: 0.00021462\n",
      "Epoch: 2750/20000, Loss: 0.00021345\n",
      "Epoch: 2760/20000, Loss: 0.00021333\n",
      "Epoch: 2770/20000, Loss: 0.00021338\n",
      "Epoch: 2780/20000, Loss: 0.00021254\n",
      "Epoch: 2790/20000, Loss: 0.00021155\n",
      "Epoch: 2800/20000, Loss: 0.00021391\n",
      "Epoch: 2810/20000, Loss: 0.00021168\n",
      "Epoch: 2820/20000, Loss: 0.00021147\n",
      "Epoch: 2830/20000, Loss: 0.00021123\n",
      "Epoch: 2840/20000, Loss: 0.00021070\n",
      "Epoch: 2850/20000, Loss: 0.00021039\n",
      "Epoch: 2860/20000, Loss: 0.00021069\n",
      "Epoch: 2870/20000, Loss: 0.00021122\n",
      "Epoch: 2880/20000, Loss: 0.00031927\n",
      "Epoch: 2890/20000, Loss: 0.00030173\n",
      "Epoch: 2900/20000, Loss: 0.00021453\n",
      "Epoch: 2910/20000, Loss: 0.00022038\n",
      "Epoch: 2920/20000, Loss: 0.00020654\n",
      "Epoch: 2930/20000, Loss: 0.00020588\n",
      "Epoch: 2940/20000, Loss: 0.00020465\n",
      "Epoch: 2950/20000, Loss: 0.00020482\n",
      "Epoch: 2960/20000, Loss: 0.00020498\n",
      "Epoch: 2970/20000, Loss: 0.00020393\n",
      "Epoch: 2980/20000, Loss: 0.00020427\n",
      "Epoch: 2990/20000, Loss: 0.00020361\n",
      "Epoch: 3000/20000, Loss: 0.00020739\n",
      "Epoch: 3010/20000, Loss: 0.00034363\n",
      "Epoch: 3020/20000, Loss: 0.00036518\n",
      "Epoch: 3030/20000, Loss: 0.00025788\n",
      "Epoch: 3040/20000, Loss: 0.00022028\n",
      "Epoch: 3050/20000, Loss: 0.00020154\n",
      "Epoch: 3060/20000, Loss: 0.00020180\n",
      "Epoch: 3070/20000, Loss: 0.00020113\n",
      "Epoch: 3080/20000, Loss: 0.00020035\n",
      "Epoch: 3090/20000, Loss: 0.00019893\n",
      "Epoch: 3100/20000, Loss: 0.00019782\n",
      "Epoch: 3110/20000, Loss: 0.00019801\n",
      "Epoch: 3120/20000, Loss: 0.00019901\n",
      "Epoch: 3130/20000, Loss: 0.00020187\n",
      "Epoch: 3140/20000, Loss: 0.00020248\n",
      "Epoch: 3150/20000, Loss: 0.00020058\n",
      "Epoch: 3160/20000, Loss: 0.00020195\n",
      "Epoch: 3170/20000, Loss: 0.00021795\n",
      "Epoch: 3180/20000, Loss: 0.00028675\n",
      "Epoch: 3190/20000, Loss: 0.00024984\n",
      "Epoch: 3200/20000, Loss: 0.00022007\n",
      "Epoch: 3210/20000, Loss: 0.00020568\n",
      "Epoch: 3220/20000, Loss: 0.00019390\n",
      "Epoch: 3230/20000, Loss: 0.00019415\n",
      "Epoch: 3240/20000, Loss: 0.00019257\n",
      "Epoch: 3250/20000, Loss: 0.00019151\n",
      "Epoch: 3260/20000, Loss: 0.00019187\n",
      "Epoch: 3270/20000, Loss: 0.00019194\n",
      "Epoch: 3280/20000, Loss: 0.00019112\n",
      "Epoch: 3290/20000, Loss: 0.00019687\n",
      "Epoch: 3300/20000, Loss: 0.00019585\n",
      "Epoch: 3310/20000, Loss: 0.00019551\n",
      "Epoch: 3320/20000, Loss: 0.00019523\n",
      "Epoch: 3330/20000, Loss: 0.00020011\n",
      "Epoch: 3340/20000, Loss: 0.00048200\n",
      "Epoch: 3350/20000, Loss: 0.00020861\n",
      "Epoch: 3360/20000, Loss: 0.00022081\n",
      "Epoch: 3370/20000, Loss: 0.00019158\n",
      "Epoch: 3380/20000, Loss: 0.00019206\n",
      "Epoch: 3390/20000, Loss: 0.00018638\n",
      "Epoch: 3400/20000, Loss: 0.00018558\n",
      "Epoch: 3410/20000, Loss: 0.00018477\n",
      "Epoch: 3420/20000, Loss: 0.00018725\n",
      "Epoch: 3430/20000, Loss: 0.00018735\n",
      "Epoch: 3440/20000, Loss: 0.00018742\n",
      "Epoch: 3450/20000, Loss: 0.00018808\n",
      "Epoch: 3460/20000, Loss: 0.00018850\n",
      "Epoch: 3470/20000, Loss: 0.00024760\n",
      "Epoch: 3480/20000, Loss: 0.00021810\n",
      "Epoch: 3490/20000, Loss: 0.00021643\n",
      "Epoch: 3500/20000, Loss: 0.00019266\n",
      "Epoch: 3510/20000, Loss: 0.00018803\n",
      "Epoch: 3520/20000, Loss: 0.00018377\n",
      "Epoch: 3530/20000, Loss: 0.00018374\n",
      "Epoch: 3540/20000, Loss: 0.00018073\n",
      "Epoch: 3550/20000, Loss: 0.00018112\n",
      "Epoch: 3560/20000, Loss: 0.00018363\n",
      "Epoch: 3570/20000, Loss: 0.00018191\n",
      "Epoch: 3580/20000, Loss: 0.00018171\n",
      "Epoch: 3590/20000, Loss: 0.00018391\n",
      "Epoch: 3600/20000, Loss: 0.00043014\n",
      "Epoch: 3610/20000, Loss: 0.00025512\n",
      "Epoch: 3620/20000, Loss: 0.00021267\n",
      "Epoch: 3630/20000, Loss: 0.00018243\n",
      "Epoch: 3640/20000, Loss: 0.00017762\n",
      "Epoch: 3650/20000, Loss: 0.00017659\n",
      "Epoch: 3660/20000, Loss: 0.00017634\n",
      "Epoch: 3670/20000, Loss: 0.00017790\n",
      "Epoch: 3680/20000, Loss: 0.00017601\n",
      "Epoch: 3690/20000, Loss: 0.00017584\n",
      "Epoch: 3700/20000, Loss: 0.00018037\n",
      "Epoch: 3710/20000, Loss: 0.00018264\n",
      "Epoch: 3720/20000, Loss: 0.00020289\n",
      "Epoch: 3730/20000, Loss: 0.00030313\n",
      "Epoch: 3740/20000, Loss: 0.00019495\n",
      "Epoch: 3750/20000, Loss: 0.00019560\n",
      "Epoch: 3760/20000, Loss: 0.00017737\n",
      "Epoch: 3770/20000, Loss: 0.00017233\n",
      "Epoch: 3780/20000, Loss: 0.00017203\n",
      "Epoch: 3790/20000, Loss: 0.00017155\n",
      "Epoch: 3800/20000, Loss: 0.00017091\n",
      "Epoch: 3810/20000, Loss: 0.00017133\n",
      "Epoch: 3820/20000, Loss: 0.00016943\n",
      "Epoch: 3830/20000, Loss: 0.00018843\n",
      "Epoch: 3840/20000, Loss: 0.00038930\n",
      "Epoch: 3850/20000, Loss: 0.00018519\n",
      "Epoch: 3860/20000, Loss: 0.00019529\n",
      "Epoch: 3870/20000, Loss: 0.00017442\n",
      "Epoch: 3880/20000, Loss: 0.00016705\n",
      "Epoch: 3890/20000, Loss: 0.00016691\n",
      "Epoch: 3900/20000, Loss: 0.00016964\n",
      "Epoch: 3910/20000, Loss: 0.00016849\n",
      "Epoch: 3920/20000, Loss: 0.00016960\n",
      "Epoch: 3930/20000, Loss: 0.00016800\n",
      "Epoch: 3940/20000, Loss: 0.00016747\n",
      "Epoch: 3950/20000, Loss: 0.00023117\n",
      "Epoch: 3960/20000, Loss: 0.00020528\n",
      "Epoch: 3970/20000, Loss: 0.00020350\n",
      "Epoch: 3980/20000, Loss: 0.00017642\n",
      "Epoch: 3990/20000, Loss: 0.00016941\n",
      "Epoch: 4000/20000, Loss: 0.00016222\n",
      "Epoch: 4010/20000, Loss: 0.00016330\n",
      "Epoch: 4020/20000, Loss: 0.00016166\n",
      "Epoch: 4030/20000, Loss: 0.00016101\n",
      "Epoch: 4040/20000, Loss: 0.00016582\n",
      "Epoch: 4050/20000, Loss: 0.00016579\n",
      "Epoch: 4060/20000, Loss: 0.00016712\n",
      "Epoch: 4070/20000, Loss: 0.00016990\n",
      "Epoch: 4080/20000, Loss: 0.00041087\n",
      "Epoch: 4090/20000, Loss: 0.00025089\n",
      "Epoch: 4100/20000, Loss: 0.00016892\n",
      "Epoch: 4110/20000, Loss: 0.00016999\n",
      "Epoch: 4120/20000, Loss: 0.00016104\n",
      "Epoch: 4130/20000, Loss: 0.00015773\n",
      "Epoch: 4140/20000, Loss: 0.00015763\n",
      "Epoch: 4150/20000, Loss: 0.00015826\n",
      "Epoch: 4160/20000, Loss: 0.00015809\n",
      "Epoch: 4170/20000, Loss: 0.00015591\n",
      "Epoch: 4180/20000, Loss: 0.00015713\n",
      "Epoch: 4190/20000, Loss: 0.00018783\n",
      "Epoch: 4200/20000, Loss: 0.00029052\n",
      "Epoch: 4210/20000, Loss: 0.00021195\n",
      "Epoch: 4220/20000, Loss: 0.00016571\n",
      "Epoch: 4230/20000, Loss: 0.00015548\n",
      "Epoch: 4240/20000, Loss: 0.00015518\n",
      "Epoch: 4250/20000, Loss: 0.00015388\n",
      "Epoch: 4260/20000, Loss: 0.00015250\n",
      "Epoch: 4270/20000, Loss: 0.00015413\n",
      "Epoch: 4280/20000, Loss: 0.00015617\n",
      "Epoch: 4290/20000, Loss: 0.00015689\n",
      "Epoch: 4300/20000, Loss: 0.00015594\n",
      "Epoch: 4310/20000, Loss: 0.00015505\n",
      "Epoch: 4320/20000, Loss: 0.00025278\n",
      "Epoch: 4330/20000, Loss: 0.00023709\n",
      "Epoch: 4340/20000, Loss: 0.00017012\n",
      "Epoch: 4350/20000, Loss: 0.00016913\n",
      "Epoch: 4360/20000, Loss: 0.00015229\n",
      "Epoch: 4370/20000, Loss: 0.00014751\n",
      "Epoch: 4380/20000, Loss: 0.00014858\n",
      "Epoch: 4390/20000, Loss: 0.00014841\n",
      "Epoch: 4400/20000, Loss: 0.00014741\n",
      "Epoch: 4410/20000, Loss: 0.00014688\n",
      "Epoch: 4420/20000, Loss: 0.00015145\n",
      "Epoch: 4430/20000, Loss: 0.00015316\n",
      "Epoch: 4440/20000, Loss: 0.00015108\n",
      "Epoch: 4450/20000, Loss: 0.00018112\n",
      "Epoch: 4460/20000, Loss: 0.00025068\n",
      "Epoch: 4470/20000, Loss: 0.00019502\n",
      "Epoch: 4480/20000, Loss: 0.00016244\n",
      "Epoch: 4490/20000, Loss: 0.00014475\n",
      "Epoch: 4500/20000, Loss: 0.00014329\n",
      "Epoch: 4510/20000, Loss: 0.00014323\n",
      "Epoch: 4520/20000, Loss: 0.00014298\n",
      "Epoch: 4530/20000, Loss: 0.00014359\n",
      "Epoch: 4540/20000, Loss: 0.00014475\n",
      "Epoch: 4550/20000, Loss: 0.00014403\n",
      "Epoch: 4560/20000, Loss: 0.00014531\n",
      "Epoch: 4570/20000, Loss: 0.00016227\n",
      "Epoch: 4580/20000, Loss: 0.00045876\n",
      "Epoch: 4590/20000, Loss: 0.00015676\n",
      "Epoch: 4600/20000, Loss: 0.00017187\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch: 4610/20000, Loss: 0.00014230\n",
      "Epoch: 4620/20000, Loss: 0.00013866\n",
      "Epoch: 4630/20000, Loss: 0.00014027\n",
      "Epoch: 4640/20000, Loss: 0.00014047\n",
      "Epoch: 4650/20000, Loss: 0.00014196\n",
      "Epoch: 4660/20000, Loss: 0.00013998\n",
      "Epoch: 4670/20000, Loss: 0.00014278\n",
      "Epoch: 4680/20000, Loss: 0.00013946\n",
      "Epoch: 4690/20000, Loss: 0.00013907\n",
      "Epoch: 4700/20000, Loss: 0.00014417\n",
      "Epoch: 4710/20000, Loss: 0.00048909\n",
      "Epoch: 4720/20000, Loss: 0.00023746\n",
      "Epoch: 4730/20000, Loss: 0.00018814\n",
      "Epoch: 4740/20000, Loss: 0.00013825\n",
      "Epoch: 4750/20000, Loss: 0.00013937\n",
      "Epoch: 4760/20000, Loss: 0.00013290\n",
      "Epoch: 4770/20000, Loss: 0.00013349\n",
      "Epoch: 4780/20000, Loss: 0.00013442\n",
      "Epoch: 4790/20000, Loss: 0.00013761\n",
      "Epoch: 4800/20000, Loss: 0.00013550\n",
      "Epoch: 4810/20000, Loss: 0.00013688\n",
      "Epoch: 4820/20000, Loss: 0.00014134\n",
      "Epoch: 4830/20000, Loss: 0.00014404\n",
      "Epoch: 4840/20000, Loss: 0.00018392\n",
      "Epoch: 4850/20000, Loss: 0.00018478\n",
      "Epoch: 4860/20000, Loss: 0.00013808\n",
      "Epoch: 4870/20000, Loss: 0.00013371\n",
      "Epoch: 4880/20000, Loss: 0.00013370\n",
      "Epoch: 4890/20000, Loss: 0.00013053\n",
      "Epoch: 4900/20000, Loss: 0.00012908\n",
      "Epoch: 4910/20000, Loss: 0.00012942\n",
      "Epoch: 4920/20000, Loss: 0.00013195\n",
      "Epoch: 4930/20000, Loss: 0.00017095\n",
      "Epoch: 4940/20000, Loss: 0.00030398\n",
      "Epoch: 4950/20000, Loss: 0.00020373\n",
      "Epoch: 4960/20000, Loss: 0.00013408\n",
      "Epoch: 4970/20000, Loss: 0.00013550\n",
      "Epoch: 4980/20000, Loss: 0.00012587\n",
      "Epoch: 4990/20000, Loss: 0.00012557\n",
      "Epoch: 5000/20000, Loss: 0.00012423\n",
      "Epoch: 5010/20000, Loss: 0.00012513\n",
      "Epoch: 5020/20000, Loss: 0.00012629\n",
      "Epoch: 5030/20000, Loss: 0.00013334\n",
      "Epoch: 5040/20000, Loss: 0.00012745\n",
      "Epoch: 5050/20000, Loss: 0.00012705\n",
      "Epoch: 5060/20000, Loss: 0.00019884\n",
      "Epoch: 5070/20000, Loss: 0.00012177\n",
      "Epoch: 5080/20000, Loss: 0.00011801\n",
      "Epoch: 5090/20000, Loss: 0.00011711\n",
      "Epoch: 5100/20000, Loss: 0.00011709\n",
      "Epoch: 5110/20000, Loss: 0.00011441\n",
      "Epoch: 5120/20000, Loss: 0.00011866\n",
      "Epoch: 5130/20000, Loss: 0.00012372\n",
      "Epoch: 5140/20000, Loss: 0.00022734\n",
      "Epoch: 5150/20000, Loss: 0.00014369\n",
      "Epoch: 5160/20000, Loss: 0.00013557\n",
      "Epoch: 5170/20000, Loss: 0.00011887\n",
      "Epoch: 5180/20000, Loss: 0.00011129\n",
      "Epoch: 5190/20000, Loss: 0.00010876\n",
      "Epoch: 5200/20000, Loss: 0.00011180\n",
      "Epoch: 5210/20000, Loss: 0.00011184\n",
      "Epoch: 5220/20000, Loss: 0.00011003\n",
      "Epoch: 5230/20000, Loss: 0.00010904\n",
      "Epoch: 5240/20000, Loss: 0.00013154\n",
      "Epoch: 5250/20000, Loss: 0.00040330\n",
      "Epoch: 5260/20000, Loss: 0.00016870\n",
      "Epoch: 5270/20000, Loss: 0.00011777\n",
      "Epoch: 5280/20000, Loss: 0.00011888\n",
      "Epoch: 5290/20000, Loss: 0.00010483\n",
      "Epoch: 5300/20000, Loss: 0.00010635\n",
      "Epoch: 5310/20000, Loss: 0.00010465\n",
      "Epoch: 5320/20000, Loss: 0.00010576\n",
      "Epoch: 5330/20000, Loss: 0.00010734\n",
      "Epoch: 5340/20000, Loss: 0.00010894\n",
      "Epoch: 5350/20000, Loss: 0.00010730\n",
      "Epoch: 5360/20000, Loss: 0.00010686\n",
      "Epoch: 5370/20000, Loss: 0.00010921\n",
      "Epoch: 5380/20000, Loss: 0.00010839\n",
      "Epoch: 5390/20000, Loss: 0.00011400\n",
      "Epoch: 5400/20000, Loss: 0.00033092\n",
      "Epoch: 5410/20000, Loss: 0.00021938\n",
      "Epoch: 5420/20000, Loss: 0.00011698\n",
      "Epoch: 5430/20000, Loss: 0.00010883\n",
      "Epoch: 5440/20000, Loss: 0.00010395\n",
      "Epoch: 5450/20000, Loss: 0.00009934\n",
      "Epoch: 5460/20000, Loss: 0.00009809\n",
      "Epoch: 5470/20000, Loss: 0.00010002\n",
      "Epoch: 5480/20000, Loss: 0.00009979\n",
      "Epoch: 5490/20000, Loss: 0.00010519\n",
      "Epoch: 5500/20000, Loss: 0.00010450\n",
      "Epoch: 5510/20000, Loss: 0.00010461\n",
      "Epoch: 5520/20000, Loss: 0.00010810\n",
      "Epoch: 5530/20000, Loss: 0.00011849\n",
      "Epoch: 5540/20000, Loss: 0.00047077\n",
      "Epoch: 5550/20000, Loss: 0.00014290\n",
      "Epoch: 5560/20000, Loss: 0.00010770\n",
      "Epoch: 5570/20000, Loss: 0.00010756\n",
      "Epoch: 5580/20000, Loss: 0.00009658\n",
      "Epoch: 5590/20000, Loss: 0.00009610\n",
      "Epoch: 5600/20000, Loss: 0.00009520\n",
      "Epoch: 5610/20000, Loss: 0.00009654\n",
      "Epoch: 5620/20000, Loss: 0.00009704\n",
      "Epoch: 5630/20000, Loss: 0.00010027\n",
      "Epoch: 5640/20000, Loss: 0.00009803\n",
      "Epoch: 5650/20000, Loss: 0.00009874\n",
      "Epoch: 5660/20000, Loss: 0.00010112\n",
      "Epoch: 5670/20000, Loss: 0.00010395\n",
      "Epoch: 5680/20000, Loss: 0.00010680\n",
      "Epoch: 5690/20000, Loss: 0.00010656\n",
      "Epoch: 5700/20000, Loss: 0.00024795\n",
      "Epoch: 5710/20000, Loss: 0.00013032\n",
      "Epoch: 5720/20000, Loss: 0.00011680\n",
      "Epoch: 5730/20000, Loss: 0.00010066\n",
      "Epoch: 5740/20000, Loss: 0.00009293\n",
      "Epoch: 5750/20000, Loss: 0.00009057\n",
      "Epoch: 5760/20000, Loss: 0.00009001\n",
      "Epoch: 5770/20000, Loss: 0.00009100\n",
      "Epoch: 5780/20000, Loss: 0.00009343\n",
      "Epoch: 5790/20000, Loss: 0.00010410\n",
      "Epoch: 5800/20000, Loss: 0.00025651\n",
      "Epoch: 5810/20000, Loss: 0.00012692\n",
      "Epoch: 5820/20000, Loss: 0.00010389\n",
      "Epoch: 5830/20000, Loss: 0.00009504\n",
      "Epoch: 5840/20000, Loss: 0.00009029\n",
      "Epoch: 5850/20000, Loss: 0.00008843\n",
      "Epoch: 5860/20000, Loss: 0.00008838\n",
      "Epoch: 5870/20000, Loss: 0.00009167\n",
      "Epoch: 5880/20000, Loss: 0.00010547\n",
      "Epoch: 5890/20000, Loss: 0.00030082\n",
      "Epoch: 5900/20000, Loss: 0.00014815\n",
      "Epoch: 5910/20000, Loss: 0.00010698\n",
      "Epoch: 5920/20000, Loss: 0.00009394\n",
      "Epoch: 5930/20000, Loss: 0.00008807\n",
      "Epoch: 5940/20000, Loss: 0.00008648\n",
      "Epoch: 5950/20000, Loss: 0.00008800\n",
      "Epoch: 5960/20000, Loss: 0.00008738\n",
      "Epoch: 5970/20000, Loss: 0.00009707\n",
      "Epoch: 5980/20000, Loss: 0.00012447\n",
      "Epoch: 5990/20000, Loss: 0.00012096\n",
      "Epoch: 6000/20000, Loss: 0.00019503\n",
      "Epoch: 6010/20000, Loss: 0.00010961\n",
      "Epoch: 6020/20000, Loss: 0.00008741\n",
      "Epoch: 6030/20000, Loss: 0.00008497\n",
      "Epoch: 6040/20000, Loss: 0.00008390\n",
      "Epoch: 6050/20000, Loss: 0.00008429\n",
      "Epoch: 6060/20000, Loss: 0.00010275\n",
      "Epoch: 6070/20000, Loss: 0.00021856\n",
      "Epoch: 6080/20000, Loss: 0.00011866\n",
      "Epoch: 6090/20000, Loss: 0.00009727\n",
      "Epoch: 6100/20000, Loss: 0.00008762\n",
      "Epoch: 6110/20000, Loss: 0.00008688\n",
      "Epoch: 6120/20000, Loss: 0.00008556\n",
      "Epoch: 6130/20000, Loss: 0.00010599\n",
      "Epoch: 6140/20000, Loss: 0.00016495\n",
      "Epoch: 6150/20000, Loss: 0.00010767\n",
      "Epoch: 6160/20000, Loss: 0.00009085\n",
      "Epoch: 6170/20000, Loss: 0.00011721\n",
      "Epoch: 6180/20000, Loss: 0.00012058\n",
      "Epoch: 6190/20000, Loss: 0.00009058\n",
      "Epoch: 6200/20000, Loss: 0.00008541\n",
      "Epoch: 6210/20000, Loss: 0.00008100\n",
      "Epoch: 6220/20000, Loss: 0.00007999\n",
      "Epoch: 6230/20000, Loss: 0.00009595\n",
      "Epoch: 6240/20000, Loss: 0.00029996\n",
      "Epoch: 6250/20000, Loss: 0.00012640\n",
      "Epoch: 6260/20000, Loss: 0.00010165\n",
      "Epoch: 6270/20000, Loss: 0.00009118\n",
      "Epoch: 6280/20000, Loss: 0.00008435\n",
      "Epoch: 6290/20000, Loss: 0.00007926\n",
      "Epoch: 6300/20000, Loss: 0.00007859\n",
      "Epoch: 6310/20000, Loss: 0.00008090\n",
      "Epoch: 6320/20000, Loss: 0.00009915\n",
      "Epoch: 6330/20000, Loss: 0.00016250\n",
      "Epoch: 6340/20000, Loss: 0.00013038\n",
      "Epoch: 6350/20000, Loss: 0.00009534\n",
      "Epoch: 6360/20000, Loss: 0.00008832\n",
      "Epoch: 6370/20000, Loss: 0.00008377\n",
      "Epoch: 6380/20000, Loss: 0.00007749\n",
      "Epoch: 6390/20000, Loss: 0.00007920\n",
      "Epoch: 6400/20000, Loss: 0.00013303\n",
      "Epoch: 6410/20000, Loss: 0.00011350\n",
      "Epoch: 6420/20000, Loss: 0.00008874\n",
      "Epoch: 6430/20000, Loss: 0.00008376\n",
      "Epoch: 6440/20000, Loss: 0.00008502\n",
      "Epoch: 6450/20000, Loss: 0.00011701\n",
      "Epoch: 6460/20000, Loss: 0.00008369\n",
      "Epoch: 6470/20000, Loss: 0.00008101\n",
      "Epoch: 6480/20000, Loss: 0.00007656\n",
      "Epoch: 6490/20000, Loss: 0.00007722\n",
      "Epoch: 6500/20000, Loss: 0.00020302\n",
      "Epoch: 6510/20000, Loss: 0.00015969\n",
      "Epoch: 6520/20000, Loss: 0.00008814\n",
      "Epoch: 6530/20000, Loss: 0.00008588\n",
      "Epoch: 6540/20000, Loss: 0.00024552\n",
      "Epoch: 6550/20000, Loss: 0.00011897\n",
      "Epoch: 6560/20000, Loss: 0.00009961\n",
      "Epoch: 6570/20000, Loss: 0.00007985\n",
      "Epoch: 6580/20000, Loss: 0.00007352\n",
      "Epoch: 6590/20000, Loss: 0.00007415\n",
      "Epoch: 6600/20000, Loss: 0.00007348\n",
      "Epoch: 6610/20000, Loss: 0.00007854\n",
      "Epoch: 6620/20000, Loss: 0.00007903\n",
      "Epoch: 6630/20000, Loss: 0.00010776\n",
      "Epoch: 6640/20000, Loss: 0.00015197\n",
      "Epoch: 6650/20000, Loss: 0.00008259\n",
      "Epoch: 6660/20000, Loss: 0.00008648\n",
      "Epoch: 6670/20000, Loss: 0.00007986\n",
      "Epoch: 6680/20000, Loss: 0.00007485\n",
      "Epoch: 6690/20000, Loss: 0.00007627\n",
      "Epoch: 6700/20000, Loss: 0.00007700\n",
      "Epoch: 6710/20000, Loss: 0.00015459\n",
      "Epoch: 6720/20000, Loss: 0.00012683\n",
      "Epoch: 6730/20000, Loss: 0.00008402\n",
      "Epoch: 6740/20000, Loss: 0.00008315\n",
      "Epoch: 6750/20000, Loss: 0.00018965\n",
      "Epoch: 6760/20000, Loss: 0.00011179\n",
      "Epoch: 6770/20000, Loss: 0.00009243\n",
      "Epoch: 6780/20000, Loss: 0.00007518\n",
      "Epoch: 6790/20000, Loss: 0.00007256\n",
      "Epoch: 6800/20000, Loss: 0.00007284\n",
      "Epoch: 6810/20000, Loss: 0.00007221\n",
      "Epoch: 6820/20000, Loss: 0.00007263\n",
      "Epoch: 6830/20000, Loss: 0.00007592\n",
      "Epoch: 6840/20000, Loss: 0.00010123\n",
      "Epoch: 6850/20000, Loss: 0.00013105\n",
      "Epoch: 6860/20000, Loss: 0.00020998\n",
      "Epoch: 6870/20000, Loss: 0.00010805\n",
      "Epoch: 6880/20000, Loss: 0.00007695\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch: 6890/20000, Loss: 0.00007156\n",
      "Epoch: 6900/20000, Loss: 0.00006950\n",
      "Epoch: 6910/20000, Loss: 0.00006880\n",
      "Epoch: 6920/20000, Loss: 0.00007075\n",
      "Epoch: 6930/20000, Loss: 0.00007080\n",
      "Epoch: 6940/20000, Loss: 0.00007596\n",
      "Epoch: 6950/20000, Loss: 0.00012802\n",
      "Epoch: 6960/20000, Loss: 0.00015224\n",
      "Epoch: 6970/20000, Loss: 0.00010804\n",
      "Epoch: 6980/20000, Loss: 0.00007929\n",
      "Epoch: 6990/20000, Loss: 0.00007213\n",
      "Epoch: 7000/20000, Loss: 0.00007123\n",
      "Epoch: 7010/20000, Loss: 0.00006821\n",
      "Epoch: 7020/20000, Loss: 0.00006805\n",
      "Epoch: 7030/20000, Loss: 0.00007375\n",
      "Epoch: 7040/20000, Loss: 0.00015502\n",
      "Epoch: 7050/20000, Loss: 0.00010184\n",
      "Epoch: 7060/20000, Loss: 0.00009751\n",
      "Epoch: 7070/20000, Loss: 0.00007919\n",
      "Epoch: 7080/20000, Loss: 0.00007113\n",
      "Epoch: 7090/20000, Loss: 0.00006832\n",
      "Epoch: 7100/20000, Loss: 0.00006841\n",
      "Epoch: 7110/20000, Loss: 0.00008944\n",
      "Epoch: 7120/20000, Loss: 0.00011836\n",
      "Epoch: 7130/20000, Loss: 0.00007397\n",
      "Epoch: 7140/20000, Loss: 0.00009842\n",
      "Epoch: 7150/20000, Loss: 0.00014675\n",
      "Epoch: 7160/20000, Loss: 0.00007913\n",
      "Epoch: 7170/20000, Loss: 0.00007649\n",
      "Epoch: 7180/20000, Loss: 0.00007280\n",
      "Epoch: 7190/20000, Loss: 0.00007087\n",
      "Epoch: 7200/20000, Loss: 0.00006932\n",
      "Epoch: 7210/20000, Loss: 0.00007445\n",
      "Epoch: 7220/20000, Loss: 0.00016846\n",
      "Epoch: 7230/20000, Loss: 0.00012140\n",
      "Epoch: 7240/20000, Loss: 0.00007900\n",
      "Epoch: 7250/20000, Loss: 0.00007023\n",
      "Epoch: 7260/20000, Loss: 0.00009532\n",
      "Epoch: 7270/20000, Loss: 0.00012345\n",
      "Epoch: 7280/20000, Loss: 0.00007861\n",
      "Epoch: 7290/20000, Loss: 0.00006884\n",
      "Epoch: 7300/20000, Loss: 0.00006647\n",
      "Epoch: 7310/20000, Loss: 0.00006637\n",
      "Epoch: 7320/20000, Loss: 0.00007007\n",
      "Epoch: 7330/20000, Loss: 0.00017365\n",
      "Epoch: 7340/20000, Loss: 0.00013979\n",
      "Epoch: 7350/20000, Loss: 0.00008292\n",
      "Epoch: 7360/20000, Loss: 0.00007575\n",
      "Epoch: 7370/20000, Loss: 0.00008196\n",
      "Epoch: 7380/20000, Loss: 0.00008374\n",
      "Epoch: 7390/20000, Loss: 0.00007206\n",
      "Epoch: 7400/20000, Loss: 0.00006891\n",
      "Epoch: 7410/20000, Loss: 0.00008952\n",
      "Epoch: 7420/20000, Loss: 0.00017426\n",
      "Epoch: 7430/20000, Loss: 0.00009168\n",
      "Epoch: 7440/20000, Loss: 0.00007563\n",
      "Epoch: 7450/20000, Loss: 0.00007014\n",
      "Epoch: 7460/20000, Loss: 0.00006737\n",
      "Epoch: 7470/20000, Loss: 0.00007126\n",
      "Epoch: 7480/20000, Loss: 0.00015025\n",
      "Epoch: 7490/20000, Loss: 0.00013792\n",
      "Epoch: 7500/20000, Loss: 0.00009065\n",
      "Epoch: 7510/20000, Loss: 0.00007304\n",
      "Epoch: 7520/20000, Loss: 0.00007369\n",
      "Epoch: 7530/20000, Loss: 0.00007386\n",
      "Epoch: 7540/20000, Loss: 0.00008771\n",
      "Epoch: 7550/20000, Loss: 0.00010751\n",
      "Epoch: 7560/20000, Loss: 0.00006919\n",
      "Epoch: 7570/20000, Loss: 0.00007173\n",
      "Epoch: 7580/20000, Loss: 0.00007239\n",
      "Epoch: 7590/20000, Loss: 0.00011789\n",
      "Epoch: 7600/20000, Loss: 0.00010888\n",
      "Epoch: 7610/20000, Loss: 0.00007655\n",
      "Epoch: 7620/20000, Loss: 0.00007587\n",
      "Epoch: 7630/20000, Loss: 0.00006834\n",
      "Epoch: 7640/20000, Loss: 0.00006594\n",
      "Epoch: 7650/20000, Loss: 0.00008963\n",
      "Epoch: 7660/20000, Loss: 0.00016433\n",
      "Epoch: 7670/20000, Loss: 0.00008661\n",
      "Epoch: 7680/20000, Loss: 0.00006905\n",
      "Epoch: 7690/20000, Loss: 0.00006686\n",
      "Epoch: 7700/20000, Loss: 0.00006523\n",
      "Epoch: 7710/20000, Loss: 0.00006488\n",
      "Epoch: 7720/20000, Loss: 0.00006736\n",
      "Epoch: 7730/20000, Loss: 0.00014581\n",
      "Epoch: 7740/20000, Loss: 0.00018113\n",
      "Epoch: 7750/20000, Loss: 0.00007979\n",
      "Epoch: 7760/20000, Loss: 0.00008031\n",
      "Epoch: 7770/20000, Loss: 0.00006591\n",
      "Epoch: 7780/20000, Loss: 0.00006274\n",
      "Epoch: 7790/20000, Loss: 0.00006168\n",
      "Epoch: 7800/20000, Loss: 0.00006178\n",
      "Epoch: 7810/20000, Loss: 0.00006193\n",
      "Epoch: 7820/20000, Loss: 0.00006773\n",
      "Epoch: 7830/20000, Loss: 0.00017050\n",
      "Epoch: 7840/20000, Loss: 0.00010763\n",
      "Epoch: 7850/20000, Loss: 0.00009881\n",
      "Epoch: 7860/20000, Loss: 0.00007029\n",
      "Epoch: 7870/20000, Loss: 0.00006477\n",
      "Epoch: 7880/20000, Loss: 0.00006428\n",
      "Epoch: 7890/20000, Loss: 0.00008632\n",
      "Epoch: 7900/20000, Loss: 0.00009002\n",
      "Epoch: 7910/20000, Loss: 0.00006637\n",
      "Epoch: 7920/20000, Loss: 0.00006306\n",
      "Epoch: 7930/20000, Loss: 0.00006788\n",
      "Epoch: 7940/20000, Loss: 0.00019224\n",
      "Epoch: 7950/20000, Loss: 0.00011278\n",
      "Epoch: 7960/20000, Loss: 0.00008155\n",
      "Epoch: 7970/20000, Loss: 0.00006998\n",
      "Epoch: 7980/20000, Loss: 0.00006700\n",
      "Epoch: 7990/20000, Loss: 0.00008539\n",
      "Epoch: 8000/20000, Loss: 0.00008901\n",
      "Epoch: 8010/20000, Loss: 0.00006932\n",
      "Epoch: 8020/20000, Loss: 0.00006349\n",
      "Epoch: 8030/20000, Loss: 0.00007031\n",
      "Epoch: 8040/20000, Loss: 0.00023690\n",
      "Epoch: 8050/20000, Loss: 0.00011527\n",
      "Epoch: 8060/20000, Loss: 0.00007274\n",
      "Epoch: 8070/20000, Loss: 0.00006587\n",
      "Epoch: 8080/20000, Loss: 0.00006430\n",
      "Epoch: 8090/20000, Loss: 0.00006456\n",
      "Epoch: 8100/20000, Loss: 0.00007649\n",
      "Epoch: 8110/20000, Loss: 0.00013659\n",
      "Epoch: 8120/20000, Loss: 0.00007830\n",
      "Epoch: 8130/20000, Loss: 0.00006241\n",
      "Epoch: 8140/20000, Loss: 0.00006645\n",
      "Epoch: 8150/20000, Loss: 0.00023808\n",
      "Epoch: 8160/20000, Loss: 0.00011987\n",
      "Epoch: 8170/20000, Loss: 0.00006661\n",
      "Epoch: 8180/20000, Loss: 0.00006364\n",
      "Epoch: 8190/20000, Loss: 0.00006436\n",
      "Epoch: 8200/20000, Loss: 0.00007349\n",
      "Epoch: 8210/20000, Loss: 0.00014416\n",
      "Epoch: 8220/20000, Loss: 0.00008123\n",
      "Epoch: 8230/20000, Loss: 0.00006567\n",
      "Epoch: 8240/20000, Loss: 0.00006073\n",
      "Epoch: 8250/20000, Loss: 0.00005999\n",
      "Epoch: 8260/20000, Loss: 0.00008044\n",
      "Epoch: 8270/20000, Loss: 0.00019683\n",
      "Epoch: 8280/20000, Loss: 0.00007138\n",
      "Epoch: 8290/20000, Loss: 0.00007983\n",
      "Epoch: 8300/20000, Loss: 0.00006328\n",
      "Epoch: 8310/20000, Loss: 0.00005951\n",
      "Epoch: 8320/20000, Loss: 0.00006383\n",
      "Epoch: 8330/20000, Loss: 0.00018000\n",
      "Epoch: 8340/20000, Loss: 0.00010192\n",
      "Epoch: 8350/20000, Loss: 0.00006364\n",
      "Epoch: 8360/20000, Loss: 0.00006359\n"
     ]
    }
   ],
   "source": [
    "# Create LSTM instance\n",
    "lstm = LSTM(input_size, hidden_size, output_size)\n",
    "\n",
    "# Loss and optimizer\n",
    "criterion = nn.MSELoss()\n",
    "optimizer = torch.optim.Adam(lstm.parameters(), lr=0.01)\n",
    "\n",
    "# Training loop: full-sequence training, one optimizer step per epoch.\n",
    "# NOTE(review): input_tensor / target_tensor are defined in a cell not\n",
    "# shown here -- presumably shaped (batch, seq, features) = (1, 160, 256)\n",
    "# to match batch_first=True; confirm before a fresh Restart & Run All.\n",
    "for epoch in range(num_epochs):\n",
    "    # Set initial hidden state and cell state\n",
    "    # (re-zeroed every epoch, so no state carries across epochs)\n",
    "    hidden = torch.zeros(1, batch_size, hidden_size)\n",
    "    cell = torch.zeros(1, batch_size, hidden_size)\n",
    "\n",
    "    # Forward pass\n",
    "    output, (hidden, cell) = lstm(input_tensor, (hidden, cell))\n",
    "    loss = criterion(output, target_tensor)\n",
    "\n",
    "    # Backward and optimize\n",
    "    optimizer.zero_grad()\n",
    "    loss.backward()\n",
    "    optimizer.step()\n",
    "\n",
    "    # Print progress every 10 epochs\n",
    "    if (epoch+1) % 10 == 0:\n",
    "        print(f'Epoch: {epoch+1}/{num_epochs}, Loss: {loss.item():.8f}')\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "c2ec96ee",
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "import scipy.io\n",
    "from sklearn.model_selection import train_test_split\n",
    "\n",
    "# Load the .mat file\n",
    "mat_data = scipy.io.loadmat('NLS.mat')\n",
    "\n",
    "# Plot the solution field vs x and t. uu is a 256x100 matrix; the plan is\n",
    "# to use the first 75 columns for training and 25 for testing.\n",
    "\n",
    "# Access the variables stored in the .mat file\n",
    "# (variable names in the .mat file become keys of the loaded dictionary)\n",
    "x = mat_data['x']\n",
    "t = mat_data['tt']\n",
    "u1 = mat_data['uu']\n",
    "\n",
    "# Sanity-check the loaded shapes (this previously printed the unrelated\n",
    "# variable `u` from h.mat instead of the freshly loaded u1)\n",
    "print(x.shape)\n",
    "print(t.shape)\n",
    "print(u1.shape)\n",
    "\n",
    "X, T = np.meshgrid(x, t)\n",
    "# Define custom color levels spanning the full data range\n",
    "c_levels = np.linspace(np.min(u1), np.max(u1), 100)\n",
    "\n",
    "# Plot the contour (single figure; a bare plt.figure() call that created an\n",
    "# extra empty figure has been removed)\n",
    "plt.figure(figsize=(15, 5))\n",
    "plt.contourf(T, X, u1.T, levels=c_levels, cmap='coolwarm')\n",
    "plt.xlabel('t')\n",
    "plt.ylabel('x')\n",
    "plt.title('Schrodinger Equation')\n",
    "plt.colorbar()  # Add a colorbar for the contour levels\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "38d5287d",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Inspect the held-out input and pre-allocate a buffer for 40 predicted\n",
    "# time steps of 256 spatial points each.\n",
    "# NOTE(review): test_tensor is defined in a cell not shown here -- confirm\n",
    "# its shape is compatible with the LSTM's (batch, seq, 256) input layout.\n",
    "print(test_tensor.shape)\n",
    "prediction_tensor = torch.zeros(1, 40, 256).float()\n",
    "print(prediction_tensor.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "62122ca0",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Roll out predictions for the test window without tracking gradients.\n",
    "# NOTE(review): every iteration feeds the SAME test_tensor with freshly\n",
    "# zeroed hidden/cell states, so all 20 rows written here are identical.\n",
    "# For a genuine multi-step forecast, the previous prediction (and/or the\n",
    "# carried hidden state) should presumably be fed back in -- confirm intent.\n",
    "# NOTE(review): only rows 0..19 of the 40-row prediction_tensor are filled;\n",
    "# rows 20..39 remain zero.\n",
    "with torch.no_grad():\n",
    "    hidden_pred = torch.zeros(1, batch_size, hidden_size)\n",
    "    cell_pred = torch.zeros(1, batch_size, hidden_size)\n",
    "    prediction, _ = lstm(test_tensor, (hidden_pred, cell_pred))\n",
    "    prediction = prediction.view(1, 1, 256).float()\n",
    "    prediction_tensor[:, 0, :] = prediction\n",
    "    for i in range(19):\n",
    "        hidden_pred = torch.zeros(1, batch_size, hidden_size)\n",
    "        cell_pred = torch.zeros(1, batch_size, hidden_size)\n",
    "        prediction, _ = lstm(test_tensor, (hidden_pred, cell_pred))\n",
    "        prediction = prediction.view(1, 1, 256).float()\n",
    "        prediction_tensor[:, i+1, :] = prediction"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "d92ce73a",
   "metadata": {},
   "source": [
    "### errors"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "17bfc232",
   "metadata": {},
   "outputs": [],
   "source": [
    "# True solution: magnitude of the complex field, transposed to (time, space)\n",
    "h_true = np.abs(u1).T\n",
    "print(h_true.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "8220399b",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Exact solution on the test window: 40 time rows (161..200 inclusive).\n",
    "# NOTE(review): the slice starts at 161, not 160 -- confirm this aligns\n",
    "# with a 160-step training window (an off-by-one here would misalign the\n",
    "# prediction/truth comparison by one time step).\n",
    "u_test_full = h_true[161:201, :]\n",
    "print(u_test_full.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "08f43ef7",
   "metadata": {},
   "outputs": [],
   "source": [
    "\n",
    "# Squared error between the predicted and exact test solutions.\n",
    "# NOTE(review): prediction_tensor is a torch tensor and u_test_full a NumPy\n",
    "# array; the subtraction relies on torch's implicit NumPy interop plus the\n",
    "# (1, 40, 256) vs (40, 256) broadcast -- works, but fragile.\n",
    "k1 = (prediction_tensor - u_test_full)**2\n",
    "u_test_full_tensor = torch.tensor(u_test_full**2)\n",
    "prediction_tensor.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "a99fde35",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Compute the relative L2 error norm (generalization error), in percent.\n",
    "# The previous version reported mean(k1)/mean(u^2) -- the SQUARED relative\n",
    "# error as a bare fraction -- while printing a '%' sign; take the square\n",
    "# root and scale by 100 so the printed value matches its label.\n",
    "relative_error_test = torch.sqrt(torch.mean(k1) / torch.mean(u_test_full_tensor)) * 100\n",
    "\n",
    "print(\"Relative Error Test: \", relative_error_test.item(), \"%\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "7806eca5",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Maximum absolute (pointwise) error. The previous torch.max of the\n",
    "# signed difference would miss the largest error if it were negative.\n",
    "R_abs = torch.max(torch.abs(prediction_tensor - u_test_full))\n",
    "print(R_abs)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "2f50e7fd",
   "metadata": {},
   "source": [
    "### explained variance score"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "bb94ac5a",
   "metadata": {},
   "outputs": [],
   "source": [
    "import torch\n",
    "\n",
    "# Explained Variance Score: 1 - Var(true - pred) / Var(true).\n",
    "# 'a' holds the model predictions, 'b' the ground truth.\n",
    "a = prediction_tensor\n",
    "b = torch.tensor(u_test_full)  # ensure the ground truth is a torch tensor\n",
    "\n",
    "# Variance of the residuals over variance of the truth\n",
    "# (an unused mean_b computation was removed -- dead code)\n",
    "numerator = torch.var(b - a)\n",
    "denominator = torch.var(b)\n",
    "evs = 1 - numerator / denominator\n",
    "\n",
    "print(\"Explained Variance Score:\", evs.item())\n"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "8bc77cb2",
   "metadata": {},
   "source": [
    "### mean absolute error"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "d423eb42",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Mean absolute error over the whole test window\n",
    "abs_diff = torch.abs(prediction_tensor - u_test_full)\n",
    "R_mean = torch.mean(abs_diff)\n",
    "print(R_mean)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "c0828980",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Drop the leading batch dimension: (1, 40, 256) -> (40, 256)\n",
    "prediction_tensor = torch.squeeze(prediction_tensor)\n",
    "# Magnitude of the field loaded from h.mat at the top of the notebook.\n",
    "# NOTE(review): if u is already a real-valued magnitude, np.abs is a no-op\n",
    "# -- confirm against how h.mat was generated.\n",
    "h = np.abs(u)\n",
    "h.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b4e2a530",
   "metadata": {},
   "outputs": [],
   "source": [
    "import matplotlib.pyplot as plt\n",
    "import numpy as np\n",
    "import torch\n",
    "\n",
    "# Compare the LSTM prediction with the true solution at a single time\n",
    "# slice (row -38 of the prediction window, labelled t = 1.28 below).\n",
    "fig, ax = plt.subplots(figsize=(5, 5))\n",
    "\n",
    "final_time_output = prediction_tensor[-38, :]\n",
    "final_out = final_time_output.detach().numpy().reshape(-1, 1)\n",
    "final_true = h[-38, :].reshape(-1, 1)\n",
    "print(final_out.shape)\n",
    "print(final_true.shape)\n",
    "\n",
    "# Prediction dotted red, drawn thick so the solid true line overlays it visibly\n",
    "ax.plot(x.T, final_out, color='red', linestyle='dotted', linewidth=12, label='Prediction')\n",
    "ax.plot(x.T, final_true, color='blue', linestyle='solid', linewidth=7, label='True')\n",
    "\n",
    "# Axis labels with bold font weight\n",
    "ax.set_xlabel(r\"${x}$\", fontsize=26, color='black', fontdict={'weight': 'bold'})\n",
    "ax.set_ylabel(r\"${|u(x, t)|}$\", fontsize=26, color='black', fontdict={'weight': 'bold'})\n",
    "\n",
    "# Title with bold font weight\n",
    "ax.set_title(r\"${t = 1.28}$\", fontsize=26, color='black', fontweight='bold')\n",
    "\n",
    "# Three ticks per axis\n",
    "ax.set_xticks([-5, 0, 5])\n",
    "ax.set_yticks([0, 2, 4])\n",
    "\n",
    "# Single consolidated tick style. The previous two tick_params calls both\n",
    "# targeted the same ticks; the second merely overrode labelsize to 24,\n",
    "# so the net effect is unchanged.\n",
    "ax.tick_params(axis='both', which='major', labelsize=24, width=2, length=10)\n",
    "\n",
    "# Thicken the plot frame\n",
    "for spine in ax.spines.values():\n",
    "    spine.set_linewidth(2)\n",
    "\n",
    "# NOTE(review): filename says 'LEM' but this notebook trains an LSTM --\n",
    "# confirm the intended model name before publishing the figure.\n",
    "plt.savefig('LEM_1.28_20.pdf', dpi=500, bbox_inches=\"tight\")\n",
    "\n",
    "# Show the plot\n",
    "plt.show()\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "6ceab7fb",
   "metadata": {},
   "outputs": [],
   "source": [
    "import matplotlib.pyplot as plt\n",
    "import numpy as np\n",
    "import torch\n",
    "\n",
    "# Compare the LSTM prediction with the true solution at a later time\n",
    "# slice (row -3 of the prediction window, labelled t = 1.5 below).\n",
    "fig, ax = plt.subplots(figsize=(5, 5))\n",
    "\n",
    "final_time_output = prediction_tensor[-3, :]\n",
    "final_out = final_time_output.detach().numpy().reshape(-1, 1)\n",
    "final_true = h[-3, :].reshape(-1, 1)\n",
    "print(final_out.shape)\n",
    "print(final_true.shape)\n",
    "\n",
    "# Prediction dotted red, drawn thick so the solid true line overlays it visibly\n",
    "ax.plot(x.T, final_out, color='red', linestyle='dotted', linewidth=12, label='Prediction')\n",
    "ax.plot(x.T, final_true, color='blue', linestyle='solid', linewidth=7, label='True')\n",
    "\n",
    "# Axis labels with bold font weight. The y-axis shows the magnitude |u|\n",
    "# (h = np.abs(u)); the previous label omitted the | |, inconsistent with\n",
    "# the companion t = 1.28 figure.\n",
    "ax.set_xlabel(r\"${x}$\", fontsize=26, color='black', fontdict={'weight': 'bold'})\n",
    "ax.set_ylabel(r\"${|u(x, t)|}$\", fontsize=26, color='black', fontdict={'weight': 'bold'})\n",
    "\n",
    "# Title with bold font weight\n",
    "ax.set_title(r\"${t = 1.5}$\", fontsize=26, color='black', fontweight='bold')\n",
    "\n",
    "# Three ticks per axis\n",
    "ax.set_xticks([-5, 0, 5])\n",
    "ax.set_yticks([0, 2, 4])\n",
    "\n",
    "# Single consolidated tick style. The previous two tick_params calls both\n",
    "# targeted the same ticks; the second merely overrode labelsize to 24,\n",
    "# so the net effect is unchanged.\n",
    "ax.tick_params(axis='both', which='major', labelsize=24, width=2, length=10)\n",
    "\n",
    "# Thicken the plot frame\n",
    "for spine in ax.spines.values():\n",
    "    spine.set_linewidth(2)\n",
    "\n",
    "plt.savefig('LSTM_1.5_20.pdf', dpi=500, bbox_inches=\"tight\")\n",
    "\n",
    "# Show the plot\n",
    "plt.show()\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "50cd5acb",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Drop the batch dimension from the training input so it can be\n",
    "# concatenated with the squeezed predictions in the next cell.\n",
    "# NOTE(review): input_tensor is defined in a cell not shown here.\n",
    "conc_u = torch.squeeze(input_tensor)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "6bdb507f",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Stack the training window on top of the predictions along the time\n",
    "# axis -- presumably 160 + 40 = 200 rows, matching t1 below; confirm.\n",
    "concatenated_tensor = torch.cat((conc_u, prediction_tensor), dim=0)\n",
    "\n",
    "# Time axis for the combined window: 200 points over 0 .. ~pi/2\n",
    "t1 = np.linspace(0, 1.5707 , 200)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "43a4873c",
   "metadata": {},
   "outputs": [],
   "source": [
    "import torch\n",
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "from matplotlib.ticker import FixedLocator\n",
    "\n",
    "# Space-time map of the combined training + predicted field.\n",
    "\n",
    "# Convert concatenated_tensor to a NumPy array\n",
    "concatenated_array = concatenated_tensor.numpy()\n",
    "\n",
    "# Spatial axis for the mesh, named x_grid so we do not clobber the x\n",
    "# loaded from the .mat file (a hidden-state hazard on partial re-runs).\n",
    "# An unused `t = np.linspace(...)` and an unused c_levels array were\n",
    "# removed -- pcolormesh takes no levels and the mesh uses t1.\n",
    "x_grid = np.linspace(0, 1, concatenated_array.shape[1])  # Replace 0 and 1 with your actual x range\n",
    "X, T = np.meshgrid(x_grid, t1)\n",
    "\n",
    "# Plot the field\n",
    "plt.figure(figsize=(20, 5))\n",
    "plt.pcolormesh(T, X, concatenated_array, shading='auto', cmap='twilight')\n",
    "\n",
    "plt.xlabel(\"$t$\", fontsize=26)\n",
    "plt.ylabel(\"$x$\", fontsize=26)\n",
    "plt.title(\"$|u(x, t)|$\", fontsize=26)\n",
    "\n",
    "# One consolidated tick style; labelsize 26 was previously applied by\n",
    "# later plt.xticks/plt.yticks overrides, so the net effect is unchanged.\n",
    "plt.tick_params(axis='both', which='major', labelsize=26, width=3, length=10)\n",
    "\n",
    "# Keep tick labels at regular (not bold) weight\n",
    "for tick in plt.gca().get_xticklabels() + plt.gca().get_yticklabels():\n",
    "    tick.set_weight('normal')\n",
    "\n",
    "# Five uniformly spaced ticks on each axis\n",
    "num_ticks = 5\n",
    "x_ticks = np.linspace(np.min(T), np.max(T), num_ticks)\n",
    "y_ticks = np.linspace(np.min(X), np.max(X), num_ticks)\n",
    "plt.gca().xaxis.set_major_locator(FixedLocator(x_ticks))\n",
    "plt.gca().yaxis.set_major_locator(FixedLocator(y_ticks))\n",
    "\n",
    "cbar1 = plt.colorbar()\n",
    "# Five uniformly distributed color-bar ticks\n",
    "c_ticks = np.linspace(np.min(concatenated_array), np.max(concatenated_array), num_ticks)\n",
    "cbar1.set_ticks(c_ticks)\n",
    "\n",
    "# Color-bar tick-label styling. Loop variable renamed from `t`, which\n",
    "# previously clobbered the time variable in the kernel namespace.\n",
    "for label in cbar1.ax.get_yticklabels():\n",
    "    label.set_weight('normal')\n",
    "    label.set_fontsize(26)\n",
    "\n",
    "# Increase the tick size and width of the color bar\n",
    "cbar1.ax.tick_params(axis='both', which='major', labelsize=30, width=3,  length=10)\n",
    "\n",
    "# Dotted line marking the training/prediction boundary at t = 1.26449\n",
    "# (the old comment said t = 0.8, which did not match the code)\n",
    "plt.axvline(x=1.26449, color='black', linestyle='dotted', linewidth=5)\n",
    "\n",
    "#plt.savefig('Contour_LEM_20.pdf', dpi=500, bbox_inches=\"tight\")\n",
    "plt.savefig('contour_LSTM_20.jpeg', dpi=500, bbox_inches=\"tight\")\n",
    "# Show the plot\n",
    "plt.show()\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "dac4415c",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "pytorch",
   "language": "python",
   "name": "pytorch"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.10"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
