{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "1965fde6",
   "metadata": {},
   "source": [
    "### In this notebook we perform centralized learning, similar to 02.Individual_Training.\n",
    "\n",
    "In centralized learning there is a single entity that has access to data from all base stations.\n",
    "Here, there is no option to filter out any base station.\n",
    "In this setting we also measure the energy consumption using the Carbontracker tool."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "c11879af",
   "metadata": {
    "ExecuteTime": {
     "start_time": "2023-08-24T11:46:27.728678Z",
     "end_time": "2023-08-24T11:46:27.748751Z"
    }
   },
   "outputs": [],
   "source": [
    "import sys\n",
    "import os\n",
    "\n",
    "from pathlib import Path\n",
    "\n",
    "# Make the project root (one level up) importable, e.g. for the `ml` package.\n",
    "parent = Path(os.path.abspath(\"\")).resolve().parents[0]\n",
    "# sys.path holds strings, so compare against str(parent); comparing the\n",
    "# Path object itself never matches, which silently re-inserted the path\n",
    "# on every re-run of this cell.\n",
    "if str(parent) not in sys.path:\n",
    "    sys.path.insert(0, str(parent))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "41f6cbc7",
   "metadata": {
    "ExecuteTime": {
     "start_time": "2023-08-24T11:46:27.742751Z",
     "end_time": "2023-08-24T11:46:29.098822Z"
    }
   },
   "outputs": [],
   "source": [
    "import random\n",
    "\n",
    "import numpy as np\n",
    "import torch\n",
    "import pandas as pd\n",
    "\n",
    "\n",
    "from matplotlib import pyplot as plt\n",
    "\n",
    "from argparse import Namespace"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "9d264d76",
   "metadata": {
    "ExecuteTime": {
     "start_time": "2023-08-24T11:46:29.100823Z",
     "end_time": "2023-08-24T11:46:29.337894Z"
    }
   },
   "outputs": [],
   "source": [
    "from ml.utils.data_utils import read_data, generate_time_lags, time_to_feature, handle_nans, to_Xy, \\\n",
    "    to_torch_dataset, to_timeseries_rep, assign_statistics, \\\n",
    "    to_train_val, scale_features, get_data_by_area, remove_identifiers, get_exogenous_data_by_area, handle_outliers"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "48559200",
   "metadata": {
    "ExecuteTime": {
     "start_time": "2023-08-24T11:46:29.338896Z",
     "end_time": "2023-08-24T11:46:29.353096Z"
    }
   },
   "outputs": [],
   "source": [
    "from ml.utils.train_utils import train, test"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "67c5e96b",
   "metadata": {
    "ExecuteTime": {
     "start_time": "2023-08-24T11:46:29.354095Z",
     "end_time": "2023-08-24T11:46:29.379095Z"
    }
   },
   "outputs": [],
   "source": [
    "from ml.models.mlp import MLP\n",
    "from ml.models.rnn import RNN\n",
    "from ml.models.lstm import LSTM\n",
    "from ml.models.gru import GRU\n",
    "from ml.models.cnn import CNN\n",
    "from ml.models.rnn_autoencoder import DualAttentionAutoEncoder\n",
    "from ml.utils.helpers import accumulate_metric"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "81e60d1f",
   "metadata": {
    "ExecuteTime": {
     "start_time": "2023-08-24T11:46:29.374096Z",
     "end_time": "2023-08-24T11:46:29.391095Z"
    }
   },
   "outputs": [],
   "source": [
    "# Central experiment configuration: every tunable for data prep and training.\n",
    "args = Namespace(\n",
    "    data_path='../mydatase/marketData/train_dataset.csv',  # dataset\n",
    "    # data_path_test=['../mydatase/marketData/Stort1_test.csv'],  # test dataset (not actually used here)\n",
    "    test_size=0.2,  # validation size\n",
    "    targets=['Weekly_Sales'],  # the target columns\n",
    "    num_lags=5,  # the number of past observations to feed as input\n",
    "    filter_bs=None,  # whether to use a single bs for training. It will be changed dynamically\n",
    "    identifier='District',  # the column name that identifies a bs\n",
    "    nan_constant=0,  # the constant to transform nan values\n",
    "    x_scaler='standard',  # x_scaler\n",
    "    y_scaler='standard',  # y_scaler\n",
    "    outlier_detection=None,  # whether to perform flooring and capping\n",
    "    criterion='mse',  # optimization criterion, mse or l1\n",
    "    epochs=150,  # the number of maximum epochs\n",
    "    lr=0.001,  # learning rate\n",
    "    optimizer='adam',  # the optimizer, it can be sgd or adam\n",
    "    batch_size=128,  # the batch size to use\n",
    "    early_stopping=True,  # whether to use early stopping\n",
    "    patience=50,  # patience value for the early stopping parameter (if specified)\n",
    "    max_grad_norm=0.0,  # whether to clip grad norm\n",
    "    reg1=0.0,  # l1 regularization\n",
    "    reg2=0.0,  # l2 regularization\n",
    "    plot_history=True,  # plot loss history\n",
    "    cuda=True,  # whether to use gpu\n",
    "    seed=0,  # reproducibility\n",
    "    assign_stats=None,\n",
    "    # whether to use statistics as exogenous data, [\"mean\", \"median\", \"std\", \"variance\", \"kurtosis\", \"skew\"]\n",
    "    use_time_features=False  # whether to use datetime features\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "0660fe4a",
   "metadata": {
    "ExecuteTime": {
     "start_time": "2023-08-24T11:46:29.385095Z",
     "end_time": "2023-08-24T11:46:29.401095Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Script arguments: Namespace(data_path='../mydatase/marketData/train_dataset.csv', test_size=0.2, targets=['Weekly_Sales'], num_lags=5, filter_bs=None, identifier='District', nan_constant=0, x_scaler='standard', y_scaler='standard', outlier_detection=None, criterion='mse', epochs=150, lr=0.001, optimizer='adam', batch_size=128, early_stopping=True, patience=50, max_grad_norm=0.0, reg1=0.0, reg2=0.0, plot_history=True, cuda=True, seed=0, assign_stats=None, use_time_features=False)\n",
      "\n"
     ]
    }
   ],
   "source": [
    "# Echo the full configuration so each run is self-documenting.\n",
    "print(f\"Script arguments: {args}\\n\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "1e574ee4",
   "metadata": {
    "ExecuteTime": {
     "start_time": "2023-08-24T11:46:29.421095Z",
     "end_time": "2023-08-24T11:46:29.458096Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Using cuda\n"
     ]
    }
   ],
   "source": [
    "# Select the compute device: GPU only when requested via args.cuda AND\n",
    "# actually available; otherwise fall back to CPU.\n",
    "device = \"cuda\" if args.cuda and torch.cuda.is_available() else \"cpu\"\n",
    "print(f\"Using {device}\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "id": "26b20d98",
   "metadata": {
    "ExecuteTime": {
     "start_time": "2023-08-24T11:46:29.433096Z",
     "end_time": "2023-08-24T11:46:29.458096Z"
    }
   },
   "outputs": [],
   "source": [
    "# Outlier detection specification\n",
    "# if args.outlier_detection is not None:\n",
    "#     outlier_columns = ['rb_down', 'rb_up', 'down', 'up']\n",
    "#     outlier_kwargs = {\"ElBorn\": (10, 90), \"LesCorts\": (10, 90), \"PobleSec\": (5, 95)}\n",
    "#     args.outlier_columns = outlier_columns\n",
    "#     args.outlier_kwargs = outlier_kwargs"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "id": "49d661a6",
   "metadata": {
    "ExecuteTime": {
     "start_time": "2023-08-24T11:46:29.448095Z",
     "end_time": "2023-08-24T11:46:29.469097Z"
    }
   },
   "outputs": [],
   "source": [
    "def seed_all(seed=None):\n",
    "    \"\"\"Seed every RNG used in the notebook to ensure reproducibility.\n",
    "\n",
    "    Args:\n",
    "        seed: Seed value to use; defaults to ``args.seed`` when omitted,\n",
    "            so existing ``seed_all()`` calls behave exactly as before.\n",
    "    \"\"\"\n",
    "    if seed is None:\n",
    "        seed = args.seed\n",
    "    random.seed(seed)\n",
    "    np.random.seed(seed)\n",
    "    torch.manual_seed(seed)\n",
    "    torch.cuda.manual_seed_all(seed)\n",
    "    # Trade cuDNN autotuning speed for deterministic kernel selection.\n",
    "    torch.backends.cudnn.deterministic = True\n",
    "    torch.backends.cudnn.benchmark = False"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "id": "e3b0bafa",
   "metadata": {
    "ExecuteTime": {
     "start_time": "2023-08-24T11:46:29.463096Z",
     "end_time": "2023-08-24T11:46:29.507095Z"
    }
   },
   "outputs": [],
   "source": [
    "seed_all()"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "cebd59b3",
   "metadata": {},
   "source": [
    "### By setting filter_bs to None, the preprocessing pipeline returns data from all base stations (all 45 districts in this dataset)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "id": "acf0480d",
   "metadata": {
    "ExecuteTime": {
     "start_time": "2023-08-24T11:46:29.487095Z",
     "end_time": "2023-08-24T11:46:29.518095Z"
    }
   },
   "outputs": [],
   "source": [
    "def make_preprocessing(filter_bs=None):\n",
    "    \"\"\"Run the full preprocessing pipeline on the configured .csv.\n",
    "\n",
    "    Pipeline: read -> NaN handling -> train/val split -> optional outlier\n",
    "    flooring/capping (train only) -> X/y split -> scaling -> time lags ->\n",
    "    optional exogenous features (datetime and/or per-area statistics).\n",
    "\n",
    "    Args:\n",
    "        filter_bs: identifier of a single base station to keep; None keeps\n",
    "            data from all base stations (the centralized setting).\n",
    "\n",
    "    Returns:\n",
    "        Tuple (X_train, X_val, y_train, y_val, exogenous_data_train,\n",
    "        exogenous_data_val, x_scaler, y_scaler). The exogenous entries are\n",
    "        None when neither time features nor statistics are requested.\n",
    "    \"\"\"\n",
    "    # read data\n",
    "    df = read_data(args.data_path, filter_data=filter_bs)\n",
    "    # handle nans\n",
    "    df = handle_nans(train_data=df, constant=args.nan_constant,\n",
    "                     identifier=args.identifier)\n",
    "    # split to train/validation\n",
    "    train_data, val_data = to_train_val(df)\n",
    "\n",
    "    # handle outliers (if specified); only the training split is touched\n",
    "    if args.outlier_detection is not None:\n",
    "        train_data = handle_outliers(df=train_data, columns=args.outlier_columns,\n",
    "                                     identifier=args.identifier, kwargs=args.outlier_kwargs)\n",
    "\n",
    "    # get X and y\n",
    "    X_train, X_val, y_train, y_val = to_Xy(train_data=train_data, val_data=val_data,\n",
    "                                          targets=args.targets)\n",
    "\n",
    "    # scale X\n",
    "    # NOTE(review): both splits are passed to scale_features; presumably the\n",
    "    # scaler is fit on train only -- verify in ml.utils.data_utils.\n",
    "    X_train, X_val, x_scaler = scale_features(train_data=X_train, val_data=X_val,\n",
    "                                             scaler=args.x_scaler,\n",
    "                                              # per_area=True, # the features are scaled locally\n",
    "                                              identifier=args.identifier)\n",
    "    # scale y\n",
    "    y_train, y_val, y_scaler = scale_features(train_data=y_train, val_data=y_val,\n",
    "                                             scaler=args.y_scaler,\n",
    "                                              # per_area=True, # the features are scaled locally\n",
    "                                              identifier=args.identifier)\n",
    "\n",
    "    # generate time lags (args.num_lags past observations per sample)\n",
    "    X_train = generate_time_lags(X_train, args.num_lags)\n",
    "    X_val = generate_time_lags(X_val, args.num_lags)\n",
    "    y_train = generate_time_lags(y_train, args.num_lags, is_y=True)\n",
    "    y_val = generate_time_lags(y_val, args.num_lags, is_y=True)\n",
    "\n",
    "    # get datetime features as exogenous data (no-op unless args.use_time_features)\n",
    "    date_time_df_train = time_to_feature(\n",
    "        X_train, args.use_time_features, identifier=args.identifier\n",
    "    )\n",
    "    date_time_df_val = time_to_feature(\n",
    "        X_val, args.use_time_features, identifier=args.identifier\n",
    "    )\n",
    "\n",
    "    # get statistics as exogenous data (no-op unless args.assign_stats is set)\n",
    "    stats_df_train = assign_statistics(X_train, args.assign_stats, args.num_lags,\n",
    "                                       targets=args.targets, identifier=args.identifier)\n",
    "    stats_df_val = assign_statistics(X_val, args.assign_stats, args.num_lags,\n",
    "                                       targets=args.targets, identifier=args.identifier)\n",
    "\n",
    "    # concat the exogenous features (if any) to a single dataframe\n",
    "    if date_time_df_train is not None or stats_df_train is not None:\n",
    "        exogenous_data_train = pd.concat([date_time_df_train, stats_df_train], axis=1)\n",
    "        # remove duplicate columns (if any)\n",
    "        exogenous_data_train = exogenous_data_train.loc[:, ~exogenous_data_train.columns.duplicated()].copy()\n",
    "        # exogenous rows must stay aligned with the lagged X/y rows\n",
    "        assert len(exogenous_data_train) == len(X_train) == len(y_train)\n",
    "    else:\n",
    "        exogenous_data_train = None\n",
    "    if date_time_df_val is not None or stats_df_val is not None:\n",
    "        exogenous_data_val = pd.concat([date_time_df_val, stats_df_val], axis=1)\n",
    "        exogenous_data_val = exogenous_data_val.loc[:, ~exogenous_data_val.columns.duplicated()].copy()\n",
    "        assert len(exogenous_data_val) == len(X_val) == len(y_val)\n",
    "    else:\n",
    "        exogenous_data_val = None\n",
    "\n",
    "    return X_train, X_val, y_train, y_val, exogenous_data_train, exogenous_data_val, x_scaler, y_scaler"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "id": "85eb3771",
   "metadata": {
    "ExecuteTime": {
     "start_time": "2023-08-24T11:46:29.494095Z",
     "end_time": "2023-08-24T11:46:30.923786Z"
    }
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO logger 2023-08-24 11:46:29,513 | data_utils.py:382 | Observations info in Stort1\n",
      "INFO logger 2023-08-24 11:46:29,514 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,514 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,515 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,516 | data_utils.py:382 | Observations info in Stort2\n",
      "INFO logger 2023-08-24 11:46:29,516 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,517 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,517 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,518 | data_utils.py:382 | Observations info in Stort3\n",
      "INFO logger 2023-08-24 11:46:29,518 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,519 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,519 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,520 | data_utils.py:382 | Observations info in Stort4\n",
      "INFO logger 2023-08-24 11:46:29,521 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,521 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,522 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,523 | data_utils.py:382 | Observations info in Stort5\n",
      "INFO logger 2023-08-24 11:46:29,523 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,524 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,525 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,526 | data_utils.py:382 | Observations info in Stort6\n",
      "INFO logger 2023-08-24 11:46:29,527 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,527 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,527 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,528 | data_utils.py:382 | Observations info in Stort7\n",
      "INFO logger 2023-08-24 11:46:29,529 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,529 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,529 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,530 | data_utils.py:382 | Observations info in Stort8\n",
      "INFO logger 2023-08-24 11:46:29,531 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,531 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,531 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,532 | data_utils.py:382 | Observations info in Stort9\n",
      "INFO logger 2023-08-24 11:46:29,533 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,533 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,533 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,536 | data_utils.py:382 | Observations info in Stort10\n",
      "INFO logger 2023-08-24 11:46:29,536 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,536 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,537 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,538 | data_utils.py:382 | Observations info in Stort11\n",
      "INFO logger 2023-08-24 11:46:29,538 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,538 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,539 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,540 | data_utils.py:382 | Observations info in Stort12\n",
      "INFO logger 2023-08-24 11:46:29,541 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,542 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,542 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,543 | data_utils.py:382 | Observations info in Stort13\n",
      "INFO logger 2023-08-24 11:46:29,544 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,544 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,544 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,545 | data_utils.py:382 | Observations info in Stort14\n",
      "INFO logger 2023-08-24 11:46:29,546 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,547 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,547 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,548 | data_utils.py:382 | Observations info in Stort15\n",
      "INFO logger 2023-08-24 11:46:29,549 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,549 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,550 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,551 | data_utils.py:382 | Observations info in Stort16\n",
      "INFO logger 2023-08-24 11:46:29,552 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,552 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,552 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,553 | data_utils.py:382 | Observations info in Stort17\n",
      "INFO logger 2023-08-24 11:46:29,554 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,554 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,554 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,555 | data_utils.py:382 | Observations info in Stort18\n",
      "INFO logger 2023-08-24 11:46:29,557 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,557 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,558 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,558 | data_utils.py:382 | Observations info in Stort19\n",
      "INFO logger 2023-08-24 11:46:29,559 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,559 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,559 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,560 | data_utils.py:382 | Observations info in Stort20\n",
      "INFO logger 2023-08-24 11:46:29,561 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,561 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,561 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,562 | data_utils.py:382 | Observations info in Stort21\n",
      "INFO logger 2023-08-24 11:46:29,563 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,563 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,564 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,565 | data_utils.py:382 | Observations info in Stort22\n",
      "INFO logger 2023-08-24 11:46:29,565 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,566 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,566 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,567 | data_utils.py:382 | Observations info in Stort23\n",
      "INFO logger 2023-08-24 11:46:29,568 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,568 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,568 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,569 | data_utils.py:382 | Observations info in Stort24\n",
      "INFO logger 2023-08-24 11:46:29,569 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,570 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,570 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,571 | data_utils.py:382 | Observations info in Stort25\n",
      "INFO logger 2023-08-24 11:46:29,571 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,572 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,572 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,573 | data_utils.py:382 | Observations info in Stort26\n",
      "INFO logger 2023-08-24 11:46:29,573 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,574 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,574 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,575 | data_utils.py:382 | Observations info in Stort27\n",
      "INFO logger 2023-08-24 11:46:29,576 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,577 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,577 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,578 | data_utils.py:382 | Observations info in Stort28\n",
      "INFO logger 2023-08-24 11:46:29,579 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,579 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,579 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,580 | data_utils.py:382 | Observations info in Stort29\n",
      "INFO logger 2023-08-24 11:46:29,581 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,581 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,581 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,582 | data_utils.py:382 | Observations info in Stort30\n",
      "INFO logger 2023-08-24 11:46:29,582 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,583 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,583 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,584 | data_utils.py:382 | Observations info in Stort31\n",
      "INFO logger 2023-08-24 11:46:29,585 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,586 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,586 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,587 | data_utils.py:382 | Observations info in Stort32\n",
      "INFO logger 2023-08-24 11:46:29,588 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,588 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,588 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,589 | data_utils.py:382 | Observations info in Stort33\n",
      "INFO logger 2023-08-24 11:46:29,590 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,591 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,591 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,592 | data_utils.py:382 | Observations info in Stort34\n",
      "INFO logger 2023-08-24 11:46:29,593 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,593 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,593 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,594 | data_utils.py:382 | Observations info in Stort35\n",
      "INFO logger 2023-08-24 11:46:29,595 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,595 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,596 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,597 | data_utils.py:382 | Observations info in Stort36\n",
      "INFO logger 2023-08-24 11:46:29,597 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,598 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,598 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,599 | data_utils.py:382 | Observations info in Stort37\n",
      "INFO logger 2023-08-24 11:46:29,600 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,601 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,601 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,602 | data_utils.py:382 | Observations info in Stort38\n",
      "INFO logger 2023-08-24 11:46:29,603 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,604 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,604 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,605 | data_utils.py:382 | Observations info in Stort39\n",
      "INFO logger 2023-08-24 11:46:29,606 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,606 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,606 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,607 | data_utils.py:382 | Observations info in Stort40\n",
      "INFO logger 2023-08-24 11:46:29,607 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,608 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,608 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,609 | data_utils.py:382 | Observations info in Stort41\n",
      "INFO logger 2023-08-24 11:46:29,610 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,611 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,612 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,613 | data_utils.py:382 | Observations info in Stort42\n",
      "INFO logger 2023-08-24 11:46:29,613 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,614 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,614 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,615 | data_utils.py:382 | Observations info in Stort43\n",
      "INFO logger 2023-08-24 11:46:29,615 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,616 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,616 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,617 | data_utils.py:382 | Observations info in Stort44\n",
      "INFO logger 2023-08-24 11:46:29,618 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,619 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,620 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,621 | data_utils.py:382 | Observations info in Stort45\n",
      "INFO logger 2023-08-24 11:46:29,621 | data_utils.py:383 | \tTotal number of samples:  120\n",
      "INFO logger 2023-08-24 11:46:29,622 | data_utils.py:384 | \tNumber of samples for training: 96\n",
      "INFO logger 2023-08-24 11:46:29,622 | data_utils.py:385 | \tNumber of samples for validation:  24\n",
      "INFO logger 2023-08-24 11:46:29,626 | data_utils.py:388 | Observations info using all data\n",
      "INFO logger 2023-08-24 11:46:29,626 | data_utils.py:389 | \tTotal number of samples:  5400\n",
      "INFO logger 2023-08-24 11:46:29,627 | data_utils.py:390 | \tNumber of samples for training: 4320\n",
      "INFO logger 2023-08-24 11:46:29,627 | data_utils.py:391 | \tNumber of samples for validation:  1080\n"
     ]
    }
   ],
   "source": [
    "X_train, X_val, y_train, y_val, exogenous_data_train, exogenous_data_val, x_scaler, y_scaler = make_preprocessing()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "id": "45e64887",
   "metadata": {
    "ExecuteTime": {
     "start_time": "2023-08-24T11:46:30.910673Z",
     "end_time": "2023-08-24T11:46:30.944819Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": "            Unemployment_lag-5  CPI_lag-5  Fuel_Price_lag-5  \\\nDate                                                          \n2010-03-12            -0.10269   1.072181         -1.415754   \n2010-03-19            -0.10269   1.075960         -1.469590   \n2010-03-26            -0.10269   1.077178         -1.545857   \n2010-04-02            -0.10269   1.077968         -1.440429   \n2010-04-09            -0.10269   1.078759         -1.296867   \n\n            Temperature_lag-5  Holiday_Flag_lag-5  Weekly_Sales_lag-5  \\\nDate                                                                    \n2010-03-12          -0.962758           -0.280449            1.071075   \n2010-03-19          -1.165565            3.565710            1.067993   \n2010-03-26          -1.089779           -0.280449            1.014671   \n2010-04-02          -0.732198           -0.280449            0.655082   \n2010-04-09          -0.739136           -0.280449            0.913036   \n\n            Unemployment_lag-4  CPI_lag-4  Fuel_Price_lag-4  \\\nDate                                                          \n2010-03-12            -0.10269   1.075960         -1.469590   \n2010-03-19            -0.10269   1.077178         -1.545857   \n2010-03-26            -0.10269   1.077968         -1.440429   \n2010-04-02            -0.10269   1.078759         -1.296867   \n2010-04-09            -0.10269   1.079549         -1.202655   \n\n            Temperature_lag-4  ...  Temperature_lag-2  Holiday_Flag_lag-2  \\\nDate                           ...                                          \n2010-03-12          -1.165565  ...          -0.732198           -0.280449   \n2010-03-19          -1.089779  ...          -0.739136           -0.280449   \n2010-03-26          -0.732198  ...          -0.136586           -0.280449   \n2010-04-02          -0.739136  ...          -0.307904           -0.280449   \n2010-04-09          -0.136586  ...          
-0.474953           -0.280449   \n\n            Weekly_Sales_lag-2  Unemployment_lag-1  CPI_lag-1  \\\nDate                                                            \n2010-03-12            0.655082           -0.102690   1.078759   \n2010-03-19            0.913036           -0.102690   1.079549   \n2010-03-26            0.708092           -0.102690   1.075272   \n2010-04-02            0.766721           -0.102690   1.070151   \n2010-04-09            0.645662           -0.261287   1.065030   \n\n            Fuel_Price_lag-1  Temperature_lag-1  Holiday_Flag_lag-1  \\\nDate                                                                  \n2010-03-12         -1.296867          -0.739136           -0.280449   \n2010-03-19         -1.202655          -0.136586           -0.280449   \n2010-03-26         -1.083768          -0.307904           -0.280449   \n2010-04-02         -1.056850          -0.474953           -0.280449   \n2010-04-09         -1.086011           0.102513           -0.280449   \n\n            Weekly_Sales_lag-1  District  \nDate                                      \n2010-03-12            0.913036    Stort1  \n2010-03-19            0.708092    Stort1  \n2010-03-26            0.766721    Stort1  \n2010-04-02            0.645662    Stort1  \n2010-04-09            0.984445    Stort1  \n\n[5 rows x 31 columns]",
      "text/html": "<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    }\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>Unemployment_lag-5</th>\n      <th>CPI_lag-5</th>\n      <th>Fuel_Price_lag-5</th>\n      <th>Temperature_lag-5</th>\n      <th>Holiday_Flag_lag-5</th>\n      <th>Weekly_Sales_lag-5</th>\n      <th>Unemployment_lag-4</th>\n      <th>CPI_lag-4</th>\n      <th>Fuel_Price_lag-4</th>\n      <th>Temperature_lag-4</th>\n      <th>...</th>\n      <th>Temperature_lag-2</th>\n      <th>Holiday_Flag_lag-2</th>\n      <th>Weekly_Sales_lag-2</th>\n      <th>Unemployment_lag-1</th>\n      <th>CPI_lag-1</th>\n      <th>Fuel_Price_lag-1</th>\n      <th>Temperature_lag-1</th>\n      <th>Holiday_Flag_lag-1</th>\n      <th>Weekly_Sales_lag-1</th>\n      <th>District</th>\n    </tr>\n    <tr>\n      <th>Date</th>\n      <th></th>\n      <th></th>\n      <th></th>\n      <th></th>\n      <th></th>\n      <th></th>\n      <th></th>\n      <th></th>\n      <th></th>\n      <th></th>\n      <th></th>\n      <th></th>\n      <th></th>\n      <th></th>\n      <th></th>\n      <th></th>\n      <th></th>\n      <th></th>\n      <th></th>\n      <th></th>\n      <th></th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>2010-03-12</th>\n      <td>-0.10269</td>\n      <td>1.072181</td>\n      <td>-1.415754</td>\n      <td>-0.962758</td>\n      <td>-0.280449</td>\n      <td>1.071075</td>\n      <td>-0.10269</td>\n      <td>1.075960</td>\n      <td>-1.469590</td>\n      <td>-1.165565</td>\n      <td>...</td>\n      <td>-0.732198</td>\n      <td>-0.280449</td>\n      <td>0.655082</td>\n      <td>-0.102690</td>\n      <td>1.078759</td>\n      <td>-1.296867</td>\n      <td>-0.739136</td>\n      
<td>-0.280449</td>\n      <td>0.913036</td>\n      <td>Stort1</td>\n    </tr>\n    <tr>\n      <th>2010-03-19</th>\n      <td>-0.10269</td>\n      <td>1.075960</td>\n      <td>-1.469590</td>\n      <td>-1.165565</td>\n      <td>3.565710</td>\n      <td>1.067993</td>\n      <td>-0.10269</td>\n      <td>1.077178</td>\n      <td>-1.545857</td>\n      <td>-1.089779</td>\n      <td>...</td>\n      <td>-0.739136</td>\n      <td>-0.280449</td>\n      <td>0.913036</td>\n      <td>-0.102690</td>\n      <td>1.079549</td>\n      <td>-1.202655</td>\n      <td>-0.136586</td>\n      <td>-0.280449</td>\n      <td>0.708092</td>\n      <td>Stort1</td>\n    </tr>\n    <tr>\n      <th>2010-03-26</th>\n      <td>-0.10269</td>\n      <td>1.077178</td>\n      <td>-1.545857</td>\n      <td>-1.089779</td>\n      <td>-0.280449</td>\n      <td>1.014671</td>\n      <td>-0.10269</td>\n      <td>1.077968</td>\n      <td>-1.440429</td>\n      <td>-0.732198</td>\n      <td>...</td>\n      <td>-0.136586</td>\n      <td>-0.280449</td>\n      <td>0.708092</td>\n      <td>-0.102690</td>\n      <td>1.075272</td>\n      <td>-1.083768</td>\n      <td>-0.307904</td>\n      <td>-0.280449</td>\n      <td>0.766721</td>\n      <td>Stort1</td>\n    </tr>\n    <tr>\n      <th>2010-04-02</th>\n      <td>-0.10269</td>\n      <td>1.077968</td>\n      <td>-1.440429</td>\n      <td>-0.732198</td>\n      <td>-0.280449</td>\n      <td>0.655082</td>\n      <td>-0.10269</td>\n      <td>1.078759</td>\n      <td>-1.296867</td>\n      <td>-0.739136</td>\n      <td>...</td>\n      <td>-0.307904</td>\n      <td>-0.280449</td>\n      <td>0.766721</td>\n      <td>-0.102690</td>\n      <td>1.070151</td>\n      <td>-1.056850</td>\n      <td>-0.474953</td>\n      <td>-0.280449</td>\n      <td>0.645662</td>\n      <td>Stort1</td>\n    </tr>\n    <tr>\n      <th>2010-04-09</th>\n      <td>-0.10269</td>\n      <td>1.078759</td>\n      <td>-1.296867</td>\n      <td>-0.739136</td>\n      <td>-0.280449</td>\n      <td>0.913036</td>\n 
     <td>-0.10269</td>\n      <td>1.079549</td>\n      <td>-1.202655</td>\n      <td>-0.136586</td>\n      <td>...</td>\n      <td>-0.474953</td>\n      <td>-0.280449</td>\n      <td>0.645662</td>\n      <td>-0.261287</td>\n      <td>1.065030</td>\n      <td>-1.086011</td>\n      <td>0.102513</td>\n      <td>-0.280449</td>\n      <td>0.984445</td>\n      <td>Stort1</td>\n    </tr>\n  </tbody>\n</table>\n<p>5 rows × 31 columns</p>\n</div>"
     },
     "execution_count": 14,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "X_train.head()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "id": "591b150f",
   "metadata": {
    "ExecuteTime": {
     "start_time": "2023-08-24T11:46:30.944819Z",
     "end_time": "2023-08-24T11:46:30.972821Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": "            Weekly_Sales District\nDate                             \n2010-03-12      0.708092   Stort1\n2010-03-19      0.766721   Stort1\n2010-03-26      0.645662   Stort1\n2010-04-02      0.984445   Stort1\n2010-04-09      0.896344   Stort1",
      "text/html": "<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    }\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>Weekly_Sales</th>\n      <th>District</th>\n    </tr>\n    <tr>\n      <th>Date</th>\n      <th></th>\n      <th></th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>2010-03-12</th>\n      <td>0.708092</td>\n      <td>Stort1</td>\n    </tr>\n    <tr>\n      <th>2010-03-19</th>\n      <td>0.766721</td>\n      <td>Stort1</td>\n    </tr>\n    <tr>\n      <th>2010-03-26</th>\n      <td>0.645662</td>\n      <td>Stort1</td>\n    </tr>\n    <tr>\n      <th>2010-04-02</th>\n      <td>0.984445</td>\n      <td>Stort1</td>\n    </tr>\n    <tr>\n      <th>2010-04-09</th>\n      <td>0.896344</td>\n      <td>Stort1</td>\n    </tr>\n  </tbody>\n</table>\n</div>"
     },
     "execution_count": 15,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "y_train.head()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "id": "a0fa465a",
   "metadata": {
    "ExecuteTime": {
     "start_time": "2023-08-24T11:46:30.959817Z",
     "end_time": "2023-08-24T11:46:31.019815Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": "(StandardScaler(), StandardScaler())"
     },
     "execution_count": 16,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "x_scaler, y_scaler"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "id": "55102f6f",
   "metadata": {
    "ExecuteTime": {
     "start_time": "2023-08-24T11:46:30.974817Z",
     "end_time": "2023-08-24T11:46:31.117817Z"
    }
   },
   "outputs": [],
   "source": [
    "def make_postprocessing(X_train, X_val, y_train, y_val, exogenous_data_train, exogenous_data_val, x_scaler, y_scaler):\n",
    "    \"\"\"Make data ready to be fed into ml algorithms.\n",
    "\n",
    "    Converts the preprocessed DataFrames to numpy arrays in timeseries\n",
    "    representation, both globally and per area (base station / client).\n",
    "    Relies on the notebook-level ``args`` namespace: ``args.identifier``,\n",
    "    ``args.num_lags`` and ``args.filter_bs``.\n",
    "\n",
    "    NOTE(review): ``x_scaler`` and ``y_scaler`` are accepted but never used\n",
    "    in this function; presumably kept for signature symmetry with\n",
    "    ``make_preprocessing`` -- confirm before removing.\n",
    "\n",
    "    Returns a 10-tuple: the global (X_train, X_val, y_train, y_val) as numpy\n",
    "    arrays (features in timeseries representation), the per-area dicts\n",
    "    (area_X_train, area_X_val, area_y_train, area_y_val -- None when only a\n",
    "    single area is present), and the per-area exogenous dicts (which gain an\n",
    "    extra 'all' entry in the centralized case), or None if absent.\n",
    "    \"\"\"\n",
    "    # if there are more than one specified areas, get the data per area\n",
    "    if X_train[args.identifier].nunique() != 1:\n",
    "        area_X_train, area_X_val, area_y_train, area_y_val = get_data_by_area(X_train, X_val,\n",
    "                                                                              y_train, y_val, \n",
    "                                                                              identifier=args.identifier)\n",
    "    else:\n",
    "        area_X_train, area_X_val, area_y_train, area_y_val = None, None, None, None\n",
    "\n",
    "    # Get the exogenous data per area.\n",
    "    if exogenous_data_train is not None:\n",
    "        exogenous_data_train, exogenous_data_val = get_exogenous_data_by_area(exogenous_data_train,\n",
    "                                                                              exogenous_data_val)\n",
    "    # transform to np\n",
    "    # (the area_* dicts are mutated in place: identifier columns dropped,\n",
    "    # then each frame converted to a numpy array)\n",
    "    if area_X_train is not None:\n",
    "        for area in area_X_train:\n",
    "            tmp_X_train, tmp_y_train, tmp_X_val, tmp_y_val = remove_identifiers(\n",
    "                area_X_train[area], area_y_train[area], area_X_val[area], area_y_val[area])\n",
    "            tmp_X_train, tmp_y_train = tmp_X_train.to_numpy(), tmp_y_train.to_numpy()\n",
    "            tmp_X_val, tmp_y_val = tmp_X_val.to_numpy(), tmp_y_val.to_numpy()\n",
    "            area_X_train[area] = tmp_X_train\n",
    "            area_X_val[area] = tmp_X_val\n",
    "            area_y_train[area] = tmp_y_train\n",
    "            area_y_val[area] = tmp_y_val\n",
    "    \n",
    "    # the exogenous dicts are likewise converted to numpy in place\n",
    "    if exogenous_data_train is not None:\n",
    "        for area in exogenous_data_train:\n",
    "            exogenous_data_train[area] = exogenous_data_train[area].to_numpy()\n",
    "            exogenous_data_val[area] = exogenous_data_val[area].to_numpy()\n",
    "    \n",
    "    # remove identifiers from features, targets\n",
    "    X_train, y_train, X_val, y_val = remove_identifiers(X_train, y_train, X_val, y_val)\n",
    "    assert len(X_train.columns) == len(X_val.columns)\n",
    "    \n",
    "    # lagged columns follow the '<feature>_lag-<k>' layout, so the\n",
    "    # per-timestep feature count is total columns divided by num_lags\n",
    "    num_features = len(X_train.columns) // args.num_lags\n",
    "    \n",
    "    # to timeseries representation\n",
    "    X_train = to_timeseries_rep(X_train.to_numpy(), num_lags=args.num_lags,\n",
    "                                            num_features=num_features)\n",
    "    X_val = to_timeseries_rep(X_val.to_numpy(), num_lags=args.num_lags,\n",
    "                                          num_features=num_features)\n",
    "    \n",
    "    if area_X_train is not None:\n",
    "        area_X_train = to_timeseries_rep(area_X_train, num_lags=args.num_lags,\n",
    "                                                     num_features=num_features)\n",
    "        area_X_val = to_timeseries_rep(area_X_val, num_lags=args.num_lags,\n",
    "                                                   num_features=num_features)\n",
    "    \n",
    "    # transform targets to numpy\n",
    "    y_train, y_val = y_train.to_numpy(), y_val.to_numpy()\n",
    "    \n",
    "    # centralized (all) learning specific\n",
    "    if not args.filter_bs and exogenous_data_train is not None:\n",
    "        # combine exogenous data from every area under the 'all' key; the row\n",
    "        # order follows the dict's iteration order -- TODO(review): confirm it\n",
    "        # matches the row order of the global X arrays built above\n",
    "        exogenous_data_train_combined, exogenous_data_val_combined = [], []\n",
    "        for area in exogenous_data_train:\n",
    "            exogenous_data_train_combined.extend(exogenous_data_train[area])\n",
    "            exogenous_data_val_combined.extend(exogenous_data_val[area])\n",
    "        exogenous_data_train_combined = np.stack(exogenous_data_train_combined)\n",
    "        exogenous_data_val_combined = np.stack(exogenous_data_val_combined)\n",
    "        exogenous_data_train[\"all\"] = exogenous_data_train_combined\n",
    "        exogenous_data_val[\"all\"] = exogenous_data_val_combined\n",
    "    return X_train, X_val, y_train, y_val, area_X_train, area_X_val, area_y_train, area_y_val, exogenous_data_train, exogenous_data_val"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "698aa32c",
   "metadata": {
    "ExecuteTime": {
     "start_time": "2023-08-24T11:46:30.989816Z",
     "end_time": "2023-08-24T11:46:31.210817Z"
    }
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "id": "ce3616bf",
   "metadata": {
    "ExecuteTime": {
     "start_time": "2023-08-24T11:46:31.006816Z",
     "end_time": "2023-08-24T11:46:31.233817Z"
    }
   },
   "outputs": [],
   "source": [
    "X_train, X_val, y_train, y_val,  client_X_train, client_X_val, client_y_train, client_y_val, exogenous_data_train, exogenous_data_val = make_postprocessing(X_train, X_val, y_train, y_val, exogenous_data_train, exogenous_data_val, x_scaler, y_scaler)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "id": "6592db61",
   "metadata": {
    "ExecuteTime": {
     "start_time": "2023-08-24T11:46:31.100817Z",
     "end_time": "2023-08-24T11:46:31.234817Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": "dict_keys(['Stort1', 'Stort2', 'Stort3', 'Stort4', 'Stort5', 'Stort6', 'Stort7', 'Stort8', 'Stort9', 'Stort10', 'Stort11', 'Stort12', 'Stort13', 'Stort14', 'Stort15', 'Stort16', 'Stort17', 'Stort18', 'Stort19', 'Stort20', 'Stort21', 'Stort22', 'Stort23', 'Stort24', 'Stort25', 'Stort26', 'Stort27', 'Stort28', 'Stort29', 'Stort30', 'Stort31', 'Stort32', 'Stort33', 'Stort34', 'Stort35', 'Stort36', 'Stort37', 'Stort38', 'Stort39', 'Stort40', 'Stort41', 'Stort42', 'Stort43', 'Stort44', 'Stort45'])"
     },
     "execution_count": 19,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "client_X_val.keys()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "id": "d521ab70",
   "metadata": {
    "ExecuteTime": {
     "start_time": "2023-08-24T11:46:31.117817Z",
     "end_time": "2023-08-24T11:46:31.234817Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": "dict_keys(['Stort1', 'Stort2', 'Stort3', 'Stort4', 'Stort5', 'Stort6', 'Stort7', 'Stort8', 'Stort9', 'Stort10', 'Stort11', 'Stort12', 'Stort13', 'Stort14', 'Stort15', 'Stort16', 'Stort17', 'Stort18', 'Stort19', 'Stort20', 'Stort21', 'Stort22', 'Stort23', 'Stort24', 'Stort25', 'Stort26', 'Stort27', 'Stort28', 'Stort29', 'Stort30', 'Stort31', 'Stort32', 'Stort33', 'Stort34', 'Stort35', 'Stort36', 'Stort37', 'Stort38', 'Stort39', 'Stort40', 'Stort41', 'Stort42', 'Stort43', 'Stort44', 'Stort45'])"
     },
     "execution_count": 20,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "client_X_val.keys()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "id": "8a11d39b",
   "metadata": {
    "ExecuteTime": {
     "start_time": "2023-08-24T11:46:31.133817Z",
     "end_time": "2023-08-24T11:46:31.235817Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": "array([[0.70809203],\n       [0.76672095]], dtype=float32)"
     },
     "execution_count": 21,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "y_train[:2]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "id": "076cc206",
   "metadata": {
    "ExecuteTime": {
     "start_time": "2023-08-24T11:46:31.150821Z",
     "end_time": "2023-08-24T11:46:31.235817Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": "(4095, 855)"
     },
     "execution_count": 22,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "len(X_train), len(X_val)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "id": "a0274244",
   "metadata": {
    "ExecuteTime": {
     "start_time": "2023-08-24T11:46:31.167817Z",
     "end_time": "2023-08-24T11:46:31.235817Z"
    }
   },
   "outputs": [],
   "source": [
    "def get_input_dims(X_train, exogenous_data_train):\n",
    "    if args.model_name == \"mlp\":\n",
    "        input_dim = X_train.shape[1] * X_train.shape[2]\n",
    "    else:\n",
    "        input_dim = X_train.shape[2]\n",
    "    \n",
    "    if exogenous_data_train is not None:\n",
    "        if len(exogenous_data_train) == 1:\n",
    "            cid = next(iter(exogenous_data_train.keys()))\n",
    "            exogenous_dim = exogenous_data_train[cid].shape[1]\n",
    "        else:\n",
    "            exogenous_dim = exogenous_data_train[\"all\"].shape[1]\n",
    "    else:\n",
    "        exogenous_dim = 0\n",
    "    \n",
    "    return input_dim, exogenous_dim"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "id": "e90c8fc3",
   "metadata": {
    "ExecuteTime": {
     "start_time": "2023-08-24T11:46:31.180817Z",
     "end_time": "2023-08-24T11:46:31.235817Z"
    }
   },
   "outputs": [],
   "source": [
    "def get_model(model: str,\n",
    "              input_dim: int,\n",
    "              out_dim: int,\n",
    "              lags: int = 5,\n",
    "              exogenous_dim: int = 0,\n",
    "              seed=0):\n",
    "    if model == \"mlp\":\n",
    "        model = MLP(input_dim=input_dim, layer_units=[256, 128, 64], num_outputs=out_dim)\n",
    "    elif model == \"rnn\":\n",
    "        model = RNN(input_dim=input_dim, rnn_hidden_size=128, num_rnn_layers=1, rnn_dropout=0.0,\n",
    "                    layer_units=[128], num_outputs=out_dim, matrix_rep=True, exogenous_dim=exogenous_dim)\n",
    "    elif model == \"lstm\":\n",
    "        model = LSTM(input_dim=input_dim, lstm_hidden_size=128, num_lstm_layers=1, lstm_dropout=0.0,\n",
    "                     layer_units=[128], num_outputs=out_dim, matrix_rep=True, exogenous_dim=exogenous_dim)\n",
    "    elif model == \"gru\":\n",
    "        model = GRU(input_dim=input_dim, gru_hidden_size=128, num_gru_layers=1, gru_dropout=0.0,\n",
    "                    layer_units=[128], num_outputs=out_dim, matrix_rep=True, exogenous_dim=exogenous_dim)\n",
    "    elif model == \"cnn\":\n",
    "        model = CNN(num_features=input_dim, lags=lags, exogenous_dim=exogenous_dim, out_dim=out_dim)\n",
    "    elif model == \"transformer\":\n",
    "        model = DualAttentionAutoEncoder(input_dim=input_dim, architecture=\"lstm\", matrix_rep=True)\n",
    "    else:\n",
    "        raise NotImplementedError(\"Specified model is not implemented. Plese define your own model or choose one from ['mlp', 'rnn', 'lstm', 'gru', 'cnn', 'da_encoder_decoder']\")\n",
    "    return model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "id": "495af4a3",
   "metadata": {
    "ExecuteTime": {
     "start_time": "2023-08-24T11:46:31.196817Z",
     "end_time": "2023-08-24T11:46:31.235817Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "6 0\n"
     ]
    }
   ],
   "source": [
    "# define the model\n",
    "args.model_name = \"transformer\"\n",
    "\n",
    "input_dim, exogenous_dim = get_input_dims(X_train, exogenous_data_train)\n",
    "\n",
    "print(input_dim, exogenous_dim)\n",
    "\n",
    "model = get_model(model=args.model_name,\n",
    "                  input_dim=input_dim,\n",
    "                  out_dim=y_train.shape[1],\n",
    "                  lags=args.num_lags,\n",
    "                  exogenous_dim=exogenous_dim,\n",
    "                  seed=args.seed)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "id": "b1933251",
   "metadata": {
    "ExecuteTime": {
     "start_time": "2023-08-24T11:46:31.213817Z",
     "end_time": "2023-08-24T11:46:31.235817Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": "DualAttentionAutoEncoder(\n  (encoder): AttentionEncoder(\n    (rnn): LSTM(6, 64)\n    (attention): Linear(in_features=133, out_features=1, bias=True)\n    (softmax): Softmax(dim=1)\n  )\n  (decoder): AttentionDecoder(\n    (attention): Sequential(\n      (0): Linear(in_features=192, out_features=64, bias=True)\n      (1): Tanh()\n      (2): Linear(in_features=64, out_features=1, bias=True)\n    )\n    (rnn): LSTM(1, 64)\n    (fc): Linear(in_features=65, out_features=1, bias=True)\n    (fc_out): Linear(in_features=128, out_features=1, bias=True)\n    (softmax): Softmax(dim=1)\n  )\n)"
     },
     "execution_count": 26,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "id": "fa4b5479",
   "metadata": {
    "ExecuteTime": {
     "start_time": "2023-08-24T11:46:31.231817Z",
     "end_time": "2023-08-24T11:46:31.337817Z"
    }
   },
   "outputs": [],
   "source": [
    "def fit(model, X_train, y_train, X_val, y_val,\n",
    "        exogenous_data_train=None, exogenous_data_val=None,\n",
    "        idxs=None,\n",
    "        log_per=1):\n",
    "    \"\"\"Build the torch data loaders and train ``model``; return it trained.\n",
    "\n",
    "    Relies on the notebook-level ``args`` (num_lags, batch_size, epochs,\n",
    "    optimizer, lr, criterion, early_stopping, patience, plot_history),\n",
    "    ``device`` and the imported ``to_torch_dataset``/``train`` helpers.\n",
    "\n",
    "    Args:\n",
    "        model: the (untrained) torch model.\n",
    "        X_train, y_train, X_val, y_val: numpy arrays; X is assumed to be in\n",
    "            timeseries representation (sample x lag x feature).\n",
    "        exogenous_data_train, exogenous_data_val: optional per-area dicts of\n",
    "            exogenous arrays; the 'all' entry is used when several areas exist.\n",
    "        idxs: indices of our targets in X; defaults to [1] when None\n",
    "            (None sentinel avoids the mutable-default-argument pitfall).\n",
    "        log_per: logging frequency (in epochs), forwarded to ``train``.\n",
    "\n",
    "    Returns:\n",
    "        The trained model.\n",
    "    \"\"\"\n",
    "    if idxs is None:\n",
    "        idxs = [1]\n",
    "\n",
    "    # get exogenous data (if any): several areas -> use the combined 'all'\n",
    "    # entry, a single area -> use that area's array, otherwise none\n",
    "    if exogenous_data_train is not None and len(exogenous_data_train) > 1:\n",
    "        exogenous_data_train = exogenous_data_train[\"all\"]\n",
    "        exogenous_data_val = exogenous_data_val[\"all\"]\n",
    "    elif exogenous_data_train is not None and len(exogenous_data_train) == 1:\n",
    "        cid = next(iter(exogenous_data_train.keys()))\n",
    "        exogenous_data_train = exogenous_data_train[cid]\n",
    "        exogenous_data_val = exogenous_data_val[cid]\n",
    "    else:\n",
    "        exogenous_data_train = None\n",
    "        exogenous_data_val = None\n",
    "\n",
    "    # per-timestep feature count -- assumes X_train is 3-D (see docstring)\n",
    "    num_features = len(X_train[0][0])\n",
    "    \n",
    "    # to torch loaders; shuffle=False keeps the chronological sample order\n",
    "    train_loader = to_torch_dataset(X_train, y_train,\n",
    "                                    num_lags=args.num_lags,\n",
    "                                    num_features=num_features,\n",
    "                                    exogenous_data=exogenous_data_train,\n",
    "                                    indices=idxs,\n",
    "                                    batch_size=args.batch_size, \n",
    "                                    shuffle=False)\n",
    "    val_loader = to_torch_dataset(X_val, y_val, \n",
    "                                  num_lags=args.num_lags,\n",
    "                                  num_features=num_features,\n",
    "                                  exogenous_data=exogenous_data_val,\n",
    "                                  indices=idxs,\n",
    "                                  batch_size=args.batch_size,\n",
    "                                  shuffle=False)\n",
    "    \n",
    "    # train the model (energy consumption is measured via carbontracker)\n",
    "    model = train(model, \n",
    "                  train_loader, val_loader,\n",
    "                  epochs=args.epochs,\n",
    "                  optimizer=args.optimizer, lr=args.lr,\n",
    "                  criterion=args.criterion,\n",
    "                  early_stopping=args.early_stopping,\n",
    "                  patience=args.patience,\n",
    "                  plot_history=args.plot_history, \n",
    "                  device=device, log_per=log_per,\n",
    "                  use_carbontracker=True)\n",
    "    \n",
    "    return model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "id": "7edadc97",
   "metadata": {
    "ExecuteTime": {
     "start_time": "2023-08-24T11:46:31.245819Z",
     "end_time": "2023-08-24T11:48:52.011935Z"
    }
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO logger 2023-08-24 11:46:36,199 | train_utils.py:96 | Epoch 1 [Train]: loss 0.8492218530736864, mse: 0.5889507532119751, rmse: 0.7674312693733395, mae 0.6301352381706238, r2: 0.4107382144383549, nrmse: -606.7936535662783\n",
      "INFO logger 2023-08-24 11:46:36,200 | train_utils.py:98 | Epoch 1 [Test]: loss 0.004689756000948231, mse: 0.5787733197212219, rmse: 0.7607715292525227, mae 0.6389738917350769, r2: 0.37455413964076134, nrmse: -28.139097696210765\n",
      "INFO logger 2023-08-24 11:46:36,200 | helpers.py:147 | Validation loss decreased (inf --> 0.004690). Caching model ...\n",
      "INFO logger 2023-08-24 11:46:37,114 | train_utils.py:96 | Epoch 2 [Train]: loss 0.3538001064443961, mse: 0.16946618258953094, rmse: 0.4116627048805016, mae 0.25949227809906006, r2: 0.8304443408883525, nrmse: -325.4940562109105\n",
      "INFO logger 2023-08-24 11:46:37,115 | train_utils.py:98 | Epoch 2 [Test]: loss 0.0019816945867928847, mse: 0.24988439679145813, rmse: 0.49988438342426555, mae 0.3073746860027313, r2: 0.7299648058934087, nrmse: -18.489513554491165\n",
      "INFO logger 2023-08-24 11:46:37,115 | helpers.py:147 | Validation loss decreased (0.004690 --> 0.001982). Caching model ...\n",
      "INFO logger 2023-08-24 11:46:37,988 | train_utils.py:96 | Epoch 3 [Train]: loss 0.16050314909080043, mse: 0.16534821689128876, rmse: 0.4066303196901195, mae 0.25345659255981445, r2: 0.8345644795706333, nrmse: -321.5150426917998\n",
      "INFO logger 2023-08-24 11:46:37,989 | train_utils.py:98 | Epoch 3 [Test]: loss 0.0017133490209691009, mse: 0.21659107506275177, rmse: 0.4653934626343088, mae 0.29977917671203613, r2: 0.7659429336675021, nrmse: -17.21377786720218\n",
      "INFO logger 2023-08-24 11:46:37,989 | helpers.py:147 | Validation loss decreased (0.001982 --> 0.001713). Caching model ...\n",
      "INFO logger 2023-08-24 11:46:38,857 | train_utils.py:96 | Epoch 4 [Train]: loss 0.1505814825213747, mse: 0.13916951417922974, rmse: 0.3730543045981774, mae 0.2210703343153, r2: 0.8607569950562453, nrmse: -294.9671110620754\n",
      "INFO logger 2023-08-24 11:46:38,858 | train_utils.py:98 | Epoch 4 [Test]: loss 0.0014586110953350513, mse: 0.1844721883535385, rmse: 0.42950225651740004, mae 0.26983898878097534, r2: 0.8006518881678241, nrmse: -15.886249014550671\n",
      "INFO logger 2023-08-24 11:46:38,859 | helpers.py:147 | Validation loss decreased (0.001713 --> 0.001459). Caching model ...\n",
      "INFO logger 2023-08-24 11:46:39,731 | train_utils.py:96 | Epoch 5 [Train]: loss 0.13426415965659544, mse: 0.12648949027061462, rmse: 0.3556536099502079, mae 0.20538505911827087, r2: 0.8734437220136176, nrmse: -281.2087049332054\n",
      "INFO logger 2023-08-24 11:46:39,731 | train_utils.py:98 | Epoch 5 [Test]: loss 0.0012489607503191073, mse: 0.15790697932243347, rmse: 0.3973751116041787, mae 0.2477806806564331, r2: 0.8293593365624798, nrmse: -14.697943676282181\n",
      "INFO logger 2023-08-24 11:46:39,732 | helpers.py:147 | Validation loss decreased (0.001459 --> 0.001249). Caching model ...\n",
      "INFO logger 2023-08-24 11:46:40,613 | train_utils.py:96 | Epoch 6 [Train]: loss 0.12277007463853806, mse: 0.11755059659481049, rmse: 0.3428565247954463, mae 0.1969824880361557, r2: 0.8823873235843054, nrmse: -271.0902873420153\n",
      "INFO logger 2023-08-24 11:46:40,613 | train_utils.py:98 | Epoch 6 [Test]: loss 0.0010808437583390731, mse: 0.13660016655921936, rmse: 0.3695945975785081, mae 0.2347469925880432, r2: 0.8523843368294571, nrmse: -13.670409695104736\n",
      "INFO logger 2023-08-24 11:46:40,614 | helpers.py:147 | Validation loss decreased (0.001249 --> 0.001081). Caching model ...\n",
      "INFO logger 2023-08-24 11:46:41,506 | train_utils.py:96 | Epoch 7 [Train]: loss 0.11443275972851552, mse: 0.10994569212198257, rmse: 0.3315805967211932, mae 0.18917961418628693, r2: 0.8899962511230324, nrmse: -262.17462040663787\n",
      "INFO logger 2023-08-24 11:46:41,507 | train_utils.py:98 | Epoch 7 [Test]: loss 0.0009395715938499797, mse: 0.11881858110427856, rmse: 0.3447007123640428, mae 0.22059743106365204, r2: 0.8715998254964573, nrmse: -12.749645127618429\n",
      "INFO logger 2023-08-24 11:46:41,507 | helpers.py:147 | Validation loss decreased (0.001081 --> 0.000940). Caching model ...\n",
      "INFO logger 2023-08-24 11:46:42,451 | train_utils.py:96 | Epoch 8 [Train]: loss 0.10730291489744559, mse: 0.10411228239536285, rmse: 0.32266434943352956, mae 0.18236038088798523, r2: 0.8958327512521529, nrmse: -255.12470925016424\n",
      "INFO logger 2023-08-24 11:46:42,452 | train_utils.py:98 | Epoch 8 [Test]: loss 0.0008383543279610182, mse: 0.10600113868713379, rmse: 0.32557816064216255, mae 0.2108181267976761, r2: 0.8854508828323814, nrmse: -12.042348218608806\n",
      "INFO logger 2023-08-24 11:46:42,452 | helpers.py:147 | Validation loss decreased (0.000940 --> 0.000838). Caching model ...\n",
      "INFO logger 2023-08-24 11:46:43,399 | train_utils.py:96 | Epoch 9 [Train]: loss 0.10225987245212309, mse: 0.0983402281999588, rmse: 0.31359245558520504, mae 0.17739325761795044, r2: 0.9016078392509201, nrmse: -247.95173124851817\n",
      "INFO logger 2023-08-24 11:46:43,400 | train_utils.py:98 | Epoch 9 [Test]: loss 0.0007601319872147855, mse: 0.09614695608615875, rmse: 0.3100757263736695, mae 0.20142783224582672, r2: 0.8960997097417913, nrmse: -11.4689506930221\n",
      "INFO logger 2023-08-24 11:46:43,400 | helpers.py:147 | Validation loss decreased (0.000838 --> 0.000760). Caching model ...\n",
      "INFO logger 2023-08-24 11:46:44,312 | train_utils.py:96 | Epoch 10 [Train]: loss 0.09889417674276046, mse: 0.09353217482566833, rmse: 0.30583030396883226, mae 0.1727973073720932, r2: 0.9064184372754275, nrmse: -241.81434210788504\n",
      "INFO logger 2023-08-24 11:46:44,313 | train_utils.py:98 | Epoch 10 [Test]: loss 0.0007103559697232051, mse: 0.08980671316385269, rmse: 0.29967768212506696, mae 0.19605796039104462, r2: 0.9029512356005682, nrmse: -11.084352200951257\n",
      "INFO logger 2023-08-24 11:46:44,313 | helpers.py:147 | Validation loss decreased (0.000760 --> 0.000710). Caching model ...\n",
      "INFO logger 2023-08-24 11:46:45,076 | train_utils.py:96 | Epoch 11 [Train]: loss 0.0949806542485021, mse: 0.09020239114761353, rmse: 0.30033712915258, mae 0.16831329464912415, r2: 0.9097499759734059, nrmse: -237.47099078841927\n",
      "INFO logger 2023-08-24 11:46:45,077 | train_utils.py:98 | Epoch 11 [Test]: loss 0.0006662190810107348, mse: 0.08424241095781326, rmse: 0.2902454322772596, mae 0.18962609767913818, r2: 0.9089642388707306, nrmse: -10.735476106411681\n",
      "INFO logger 2023-08-24 11:46:45,077 | helpers.py:147 | Validation loss decreased (0.000710 --> 0.000666). Caching model ...\n",
      "INFO logger 2023-08-24 11:46:45,912 | train_utils.py:96 | Epoch 12 [Train]: loss 0.09097480095806532, mse: 0.08682027459144592, rmse: 0.2946528034678203, mae 0.1647193282842636, r2: 0.9131338774594282, nrmse: -232.9765000268785\n",
      "INFO logger 2023-08-24 11:46:45,912 | train_utils.py:98 | Epoch 12 [Test]: loss 0.0006299062474080694, mse: 0.07962707430124283, rmse: 0.282182696672285, mae 0.18507802486419678, r2: 0.9139517661289952, nrmse: -10.437255029303275\n",
      "INFO logger 2023-08-24 11:46:45,913 | helpers.py:147 | Validation loss decreased (0.000666 --> 0.000630). Caching model ...\n",
      "INFO logger 2023-08-24 11:46:46,730 | train_utils.py:96 | Epoch 13 [Train]: loss 0.08736835730087478, mse: 0.08397360146045685, rmse: 0.28978198953775036, mae 0.16210561990737915, r2: 0.9159820611889417, nrmse: -229.1252379029334\n",
      "INFO logger 2023-08-24 11:46:46,731 | train_utils.py:98 | Epoch 13 [Test]: loss 0.0006008684460879766, mse: 0.0759458988904953, rmse: 0.2755828348981397, mae 0.1819373071193695, r2: 0.9179297770744479, nrmse: -10.193142114843093\n",
      "INFO logger 2023-08-24 11:46:46,731 | helpers.py:147 | Validation loss decreased (0.000630 --> 0.000601). Caching model ...\n",
      "INFO logger 2023-08-24 11:46:47,493 | train_utils.py:96 | Epoch 14 [Train]: loss 0.0842242295111646, mse: 0.08196572959423065, rmse: 0.2862965762880001, mae 0.16012342274188995, r2: 0.9179909914564155, nrmse: -226.3693863701554\n",
      "INFO logger 2023-08-24 11:46:47,494 | train_utils.py:98 | Epoch 14 [Test]: loss 0.0005694078118131872, mse: 0.0720401480793953, rmse: 0.26840295840283745, mae 0.17807286977767944, r2: 0.9221505002807108, nrmse: -9.927575859562035\n",
      "INFO logger 2023-08-24 11:46:47,494 | helpers.py:147 | Validation loss decreased (0.000601 --> 0.000569). Caching model ...\n",
      "INFO logger 2023-08-24 11:46:48,292 | train_utils.py:96 | Epoch 15 [Train]: loss 0.08182726005179575, mse: 0.08037162572145462, rmse: 0.28349889897749975, mae 0.15703602135181427, r2: 0.919585938675664, nrmse: -224.15731487334995\n",
      "INFO logger 2023-08-24 11:46:48,292 | train_utils.py:98 | Epoch 15 [Test]: loss 0.0005443314188405087, mse: 0.06887216120958328, rmse: 0.26243506093809815, mae 0.17310266196727753, r2: 0.925573950604329, nrmse: -9.70683777546698\n",
      "INFO logger 2023-08-24 11:46:48,293 | helpers.py:147 | Validation loss decreased (0.000569 --> 0.000544). Caching model ...\n",
      "INFO logger 2023-08-24 11:46:49,111 | train_utils.py:96 | Epoch 16 [Train]: loss 0.08087432463071309, mse: 0.07868766784667969, rmse: 0.28051322223146574, mae 0.1545194387435913, r2: 0.9212707805761723, nrmse: -221.79659571400006\n",
      "INFO logger 2023-08-24 11:46:49,112 | train_utils.py:98 | Epoch 16 [Test]: loss 0.0005156743715991054, mse: 0.06530910730361938, rmse: 0.25555646597888965, mae 0.1686907708644867, r2: 0.9294243385886687, nrmse: -9.452415195063628\n",
      "INFO logger 2023-08-24 11:46:49,113 | helpers.py:147 | Validation loss decreased (0.000544 --> 0.000516). Caching model ...\n",
      "INFO logger 2023-08-24 11:46:50,111 | train_utils.py:96 | Epoch 17 [Train]: loss 0.08069228474778356, mse: 0.07814282178878784, rmse: 0.2795403759545083, mae 0.15403586626052856, r2: 0.9218159181457745, nrmse: -221.0273842284746\n",
      "INFO logger 2023-08-24 11:46:50,112 | train_utils.py:98 | Epoch 17 [Test]: loss 0.0004972464105452014, mse: 0.06298663467168808, rmse: 0.2509713821767097, mae 0.1649642139673233, r2: 0.9319340911880293, nrmse: -9.282824041749018\n",
      "INFO logger 2023-08-24 11:46:50,113 | helpers.py:147 | Validation loss decreased (0.000516 --> 0.000497). Caching model ...\n",
      "INFO logger 2023-08-24 11:46:51,073 | train_utils.py:96 | Epoch 18 [Train]: loss 0.08082476347044576, mse: 0.07710342109203339, rmse: 0.27767502785096354, mae 0.15327690541744232, r2: 0.9228558676824504, nrmse: -219.55248812234237\n",
      "INFO logger 2023-08-24 11:46:51,073 | train_utils.py:98 | Epoch 18 [Test]: loss 0.00048713205894182997, mse: 0.061698127537965775, rmse: 0.24839107781473507, mae 0.16543515026569366, r2: 0.9333265076826621, nrmse: -9.187384828087984\n",
      "INFO logger 2023-08-24 11:46:51,074 | helpers.py:147 | Validation loss decreased (0.000497 --> 0.000487). Caching model ...\n",
      "INFO logger 2023-08-24 11:46:51,931 | train_utils.py:96 | Epoch 19 [Train]: loss 0.08112518501002342, mse: 0.07641425728797913, rmse: 0.2764312885474058, mae 0.1527407318353653, r2: 0.9235453881949437, nrmse: -218.56908655114233\n",
      "INFO logger 2023-08-24 11:46:51,932 | train_utils.py:98 | Epoch 19 [Test]: loss 0.0004856112788905177, mse: 0.061476368457078934, rmse: 0.24794428498571797, mae 0.16619712114334106, r2: 0.9335661484814816, nrmse: -9.170859042642212\n",
      "INFO logger 2023-08-24 11:46:51,932 | helpers.py:147 | Validation loss decreased (0.000487 --> 0.000486). Caching model ...\n",
      "INFO logger 2023-08-24 11:46:52,866 | train_utils.py:96 | Epoch 20 [Train]: loss 0.08107912464765832, mse: 0.0760541558265686, rmse: 0.2757791794653262, mae 0.15619757771492004, r2: 0.9239056769973427, nrmse: -218.05347600954696\n",
      "INFO logger 2023-08-24 11:46:52,867 | train_utils.py:98 | Epoch 20 [Test]: loss 0.0004933548851581345, mse: 0.06242293491959572, rmse: 0.24984582229766364, mae 0.17169593274593353, r2: 0.9325432490919656, nrmse: -9.241192305831493\n",
      "INFO logger 2023-08-24 11:46:52,867 | helpers.py:135 | EarlyStopping counter: 1 out of 50\n",
      "INFO logger 2023-08-24 11:46:53,857 | train_utils.py:96 | Epoch 21 [Train]: loss 0.07899220290710218, mse: 0.07621251791715622, rmse: 0.2760661477203538, mae 0.15920618176460266, r2: 0.9237472344368417, nrmse: -218.2803764798235\n",
      "INFO logger 2023-08-24 11:46:53,858 | train_utils.py:98 | Epoch 21 [Test]: loss 0.0005028555529159412, mse: 0.06360813230276108, rmse: 0.252206527081995, mae 0.17615966498851776, r2: 0.9312624773457391, nrmse: -9.328509062576424\n",
      "INFO logger 2023-08-24 11:46:53,858 | helpers.py:135 | EarlyStopping counter: 2 out of 50\n",
      "INFO logger 2023-08-24 11:46:55,187 | train_utils.py:96 | Epoch 22 [Train]: loss 0.07653934143309016, mse: 0.07556835561990738, rmse: 0.27489699092552355, mae 0.15833818912506104, r2: 0.9243917391911973, nrmse: -217.35594591328405\n",
      "INFO logger 2023-08-24 11:46:55,188 | train_utils.py:98 | Epoch 22 [Test]: loss 0.0004991347416799668, mse: 0.0631219819188118, rmse: 0.25124088425017893, mae 0.1758645921945572, r2: 0.9317878356977021, nrmse: -9.292792271214877\n",
      "INFO logger 2023-08-24 11:46:55,189 | helpers.py:135 | EarlyStopping counter: 3 out of 50\n",
      "INFO logger 2023-08-24 11:46:57,108 | train_utils.py:96 | Epoch 23 [Train]: loss 0.07534876679710578, mse: 0.07459531724452972, rmse: 0.2731214331474733, mae 0.15655602514743805, r2: 0.9253652925668984, nrmse: -215.95204535012232\n",
      "INFO logger 2023-08-24 11:46:57,110 | train_utils.py:98 | Epoch 23 [Test]: loss 0.0004904044737592775, mse: 0.06200290843844414, rmse: 0.24900383217622202, mae 0.17415836453437805, r2: 0.9329971451238133, nrmse: -9.210049128970276\n",
      "INFO logger 2023-08-24 11:46:57,111 | helpers.py:135 | EarlyStopping counter: 4 out of 50\n",
      "INFO logger 2023-08-24 11:46:58,959 | train_utils.py:96 | Epoch 24 [Train]: loss 0.07436729518667562, mse: 0.07374706864356995, rmse: 0.27156411516172374, mae 0.15499122440814972, r2: 0.9262139937251388, nrmse: -214.72070293804015\n",
      "INFO logger 2023-08-24 11:46:58,960 | train_utils.py:98 | Epoch 24 [Test]: loss 0.00047962948440285455, mse: 0.06067107245326042, rmse: 0.246314986253903, mae 0.17139992117881775, r2: 0.9344363846494144, nrmse: -9.110595225677478\n",
      "INFO logger 2023-08-24 11:46:58,961 | helpers.py:147 | Validation loss decreased (0.000486 --> 0.000480). Caching model ...\n",
      "INFO logger 2023-08-24 11:47:00,798 | train_utils.py:96 | Epoch 25 [Train]: loss 0.0736563194077462, mse: 0.07295786589384079, rmse: 0.270107137806169, mae 0.15424227714538574, r2: 0.9270036154774349, nrmse: -213.56869799893687\n",
      "INFO logger 2023-08-24 11:47:00,799 | train_utils.py:98 | Epoch 25 [Test]: loss 0.0004718979167049391, mse: 0.05971977859735489, rmse: 0.244376305310795, mae 0.17005975544452667, r2: 0.9354643906456951, nrmse: -9.038888109464155\n",
      "INFO logger 2023-08-24 11:47:00,800 | helpers.py:147 | Validation loss decreased (0.000480 --> 0.000472). Caching model ...\n",
      "INFO logger 2023-08-24 11:47:02,689 | train_utils.py:96 | Epoch 26 [Train]: loss 0.07300665950606344, mse: 0.07203090190887451, rmse: 0.26838573343021516, mae 0.1520538181066513, r2: 0.9279310590567681, nrmse: -212.20761552518914\n",
      "INFO logger 2023-08-24 11:47:02,690 | train_utils.py:98 | Epoch 26 [Test]: loss 0.0004632231436277691, mse: 0.05858488008379936, rmse: 0.24204313682440856, mae 0.16715843975543976, r2: 0.9366908082994255, nrmse: -8.952589853738612\n",
      "INFO logger 2023-08-24 11:47:02,691 | helpers.py:147 | Validation loss decreased (0.000472 --> 0.000463). Caching model ...\n",
      "INFO logger 2023-08-24 11:47:04,447 | train_utils.py:96 | Epoch 27 [Train]: loss 0.07282407610182418, mse: 0.07304588705301285, rmse: 0.2702700261830987, mae 0.15790899097919464, r2: 0.9269155441083452, nrmse: -213.69749081375318\n",
      "INFO logger 2023-08-24 11:47:04,449 | train_utils.py:98 | Epoch 27 [Test]: loss 0.000480851881772454, mse: 0.06077909469604492, rmse: 0.24653416537276313, mae 0.1750456988811493, r2: 0.9343196491348812, nrmse: -9.11870213084076\n",
      "INFO logger 2023-08-24 11:47:04,450 | helpers.py:135 | EarlyStopping counter: 1 out of 50\n",
      "INFO logger 2023-08-24 11:47:06,342 | train_utils.py:96 | Epoch 28 [Train]: loss 0.07192150085757021, mse: 0.07083785533905029, rmse: 0.2661538189450798, mae 0.15085549652576447, r2: 0.929124736451886, nrmse: -210.44288218823678\n",
      "INFO logger 2023-08-24 11:47:06,343 | train_utils.py:98 | Epoch 28 [Test]: loss 0.0004598769658838796, mse: 0.058147139847278595, rmse: 0.24113718055762076, mae 0.16628071665763855, r2: 0.9371638461371601, nrmse: -8.919080723967832\n",
      "INFO logger 2023-08-24 11:47:06,344 | helpers.py:147 | Validation loss decreased (0.000463 --> 0.000460). Caching model ...\n",
      "INFO logger 2023-08-24 11:47:08,055 | train_utils.py:96 | Epoch 29 [Train]: loss 0.07267603804939426, mse: 0.07294529676437378, rmse: 0.2700838698707751, mae 0.1588425189256668, r2: 0.9270161812489199, nrmse: -213.5503004745043\n",
      "INFO logger 2023-08-24 11:47:08,056 | train_utils.py:98 | Epoch 29 [Test]: loss 0.00047805426391767477, mse: 0.06045394018292427, rmse: 0.24587382980489053, mae 0.17490366101264954, r2: 0.9346710207207699, nrmse: -9.094277916287272\n",
      "INFO logger 2023-08-24 11:47:08,056 | helpers.py:135 | EarlyStopping counter: 1 out of 50\n",
      "INFO logger 2023-08-24 11:47:08,913 | train_utils.py:96 | Epoch 30 [Train]: loss 0.07235436644987203, mse: 0.0728771910071373, rmse: 0.2699577578198806, mae 0.1602666974067688, r2: 0.9270843225940562, nrmse: -213.45058601775105\n",
      "INFO logger 2023-08-24 11:47:08,914 | train_utils.py:98 | Epoch 30 [Test]: loss 0.0004922607162019662, mse: 0.062137652188539505, rmse: 0.24927425095372266, mae 0.18005706369876862, r2: 0.9328515359871437, nrmse: -9.220051265099707\n",
      "INFO logger 2023-08-24 11:47:08,915 | helpers.py:135 | EarlyStopping counter: 2 out of 50\n",
      "INFO logger 2023-08-24 11:47:09,984 | train_utils.py:96 | Epoch 31 [Train]: loss 0.07046952079690527, mse: 0.06911269575357437, rmse: 0.26289293591417473, mae 0.14689411222934723, r2: 0.9308508098181841, nrmse: -207.86456252999446\n",
      "INFO logger 2023-08-24 11:47:09,985 | train_utils.py:98 | Epoch 31 [Test]: loss 0.0004527313754572506, mse: 0.0572226345539093, rmse: 0.23921253009386717, mae 0.16200129687786102, r2: 0.9381629078167115, nrmse: -8.847892561230156\n",
      "INFO logger 2023-08-24 11:47:09,986 | helpers.py:147 | Validation loss decreased (0.000460 --> 0.000453). Caching model ...\n",
      "INFO logger 2023-08-24 11:47:10,886 | train_utils.py:96 | Epoch 32 [Train]: loss 0.07458510706783272, mse: 0.07701849192380905, rmse: 0.27752205664380813, mae 0.16852231323719025, r2: 0.9229408408079716, nrmse: -219.43153662950567\n",
      "INFO logger 2023-08-24 11:47:10,887 | train_utils.py:98 | Epoch 32 [Test]: loss 0.0005213524078765111, mse: 0.06593269109725952, rmse: 0.2567736183825346, mae 0.18736179172992706, r2: 0.9287504669616682, nrmse: -9.497434716799665\n",
      "INFO logger 2023-08-24 11:47:10,888 | helpers.py:135 | EarlyStopping counter: 1 out of 50\n",
      "INFO logger 2023-08-24 11:47:11,804 | train_utils.py:96 | Epoch 33 [Train]: loss 0.07265441283379914, mse: 0.07140481472015381, rmse: 0.2672167934845297, mae 0.14807334542274475, r2: 0.9285574834194972, nrmse: -211.2833564172416\n",
      "INFO logger 2023-08-24 11:47:11,805 | train_utils.py:98 | Epoch 33 [Test]: loss 0.0004576837676658965, mse: 0.057907845824956894, rmse: 0.24064049082595576, mae 0.16106553375720978, r2: 0.9374224363992709, nrmse: -8.900709372850423\n",
      "INFO logger 2023-08-24 11:47:11,806 | helpers.py:135 | EarlyStopping counter: 2 out of 50\n",
      "INFO logger 2023-08-24 11:47:12,834 | train_utils.py:96 | Epoch 34 [Train]: loss 0.07692563143791631, mse: 0.07630891352891922, rmse: 0.27624068043812666, mae 0.16878537833690643, r2: 0.9236507856273061, nrmse: -218.41837625871017\n",
      "INFO logger 2023-08-24 11:47:12,835 | train_utils.py:98 | Epoch 34 [Test]: loss 0.0005234853762109377, mse: 0.0660773441195488, rmse: 0.2570551382866112, mae 0.18828725814819336, r2: 0.9285941403124637, nrmse: -9.507847456734902\n",
      "INFO logger 2023-08-24 11:47:12,835 | helpers.py:135 | EarlyStopping counter: 3 out of 50\n",
      "INFO logger 2023-08-24 11:47:13,708 | train_utils.py:96 | Epoch 35 [Train]: loss 0.07178852646029554, mse: 0.07072904706001282, rmse: 0.2659493317532737, mae 0.150430366396904, r2: 0.929233602998833, nrmse: -210.2811979629838\n",
      "INFO logger 2023-08-24 11:47:13,709 | train_utils.py:98 | Epoch 35 [Test]: loss 0.00046142370859433336, mse: 0.05834128335118294, rmse: 0.24153940330965243, mae 0.1650986671447754, r2: 0.93695404743162, nrmse: -8.933957970131576\n",
      "INFO logger 2023-08-24 11:47:13,709 | helpers.py:135 | EarlyStopping counter: 4 out of 50\n",
      "INFO logger 2023-08-24 11:47:14,628 | train_utils.py:96 | Epoch 36 [Train]: loss 0.07545398814545479, mse: 0.0709187239408493, rmse: 0.26630569641081525, mae 0.15871280431747437, r2: 0.9290438326628505, nrmse: -210.56296888004343\n",
      "INFO logger 2023-08-24 11:47:14,629 | train_utils.py:98 | Epoch 36 [Test]: loss 0.0004892876949052365, mse: 0.061770398169755936, rmse: 0.248536512749648, mae 0.17936836183071136, r2: 0.9332484059779144, nrmse: -9.192764114358038\n",
      "INFO logger 2023-08-24 11:47:14,629 | helpers.py:135 | EarlyStopping counter: 5 out of 50\n",
      "INFO logger 2023-08-24 11:47:15,533 | train_utils.py:96 | Epoch 37 [Train]: loss 0.06912317952082958, mse: 0.06798867136240005, rmse: 0.2607463736323097, mae 0.14842918515205383, r2: 0.9319754311133603, nrmse: -206.16731559518547\n",
      "INFO logger 2023-08-24 11:47:15,534 | train_utils.py:98 | Epoch 37 [Test]: loss 0.0004566336431872775, mse: 0.05769747495651245, rmse: 0.24020298698499246, mae 0.1670873463153839, r2: 0.9376497717804795, nrmse: -8.884527164592145\n",
      "INFO logger 2023-08-24 11:47:15,534 | helpers.py:135 | EarlyStopping counter: 6 out of 50\n",
      "INFO logger 2023-08-24 11:47:16,419 | train_utils.py:96 | Epoch 38 [Train]: loss 0.07086558808805421, mse: 0.06860239058732986, rmse: 0.26192058068683693, mae 0.1506403684616089, r2: 0.931361382578483, nrmse: -207.0957393082845\n",
      "INFO logger 2023-08-24 11:47:16,420 | train_utils.py:98 | Epoch 38 [Test]: loss 0.00045886977592058347, mse: 0.05796804651618004, rmse: 0.24076554262639005, mae 0.16832521557807922, r2: 0.9373573779461504, nrmse: -8.905334736306079\n",
      "INFO logger 2023-08-24 11:47:16,420 | helpers.py:135 | EarlyStopping counter: 7 out of 50\n",
      "INFO logger 2023-08-24 11:47:17,395 | train_utils.py:96 | Epoch 39 [Train]: loss 0.06919880064378958, mse: 0.0669911801815033, rmse: 0.25882654458440557, mae 0.14625902473926544, r2: 0.9329734386184085, nrmse: -204.64934241805437\n",
      "INFO logger 2023-08-24 11:47:17,396 | train_utils.py:98 | Epoch 39 [Test]: loss 0.00045129913640649694, mse: 0.05702158063650131, rmse: 0.23879191911892939, mae 0.1657034009695053, r2: 0.9383801700581874, nrmse: -8.832335179203122\n",
      "INFO logger 2023-08-24 11:47:17,396 | helpers.py:147 | Validation loss decreased (0.000453 --> 0.000451). Caching model ...\n",
      "INFO logger 2023-08-24 11:47:18,420 | train_utils.py:96 | Epoch 40 [Train]: loss 0.07059668251895346, mse: 0.06788457930088043, rmse: 0.260546693129812, mae 0.14985410869121552, r2: 0.93207956847203, nrmse: -206.00943192990889\n",
      "INFO logger 2023-08-24 11:47:18,420 | train_utils.py:98 | Epoch 40 [Test]: loss 0.0004551190033293607, mse: 0.057493001222610474, rmse: 0.23977698226187283, mae 0.16854184865951538, r2: 0.9378707351853376, nrmse: -8.86877028087347\n",
      "INFO logger 2023-08-24 11:47:18,421 | helpers.py:135 | EarlyStopping counter: 1 out of 50\n",
      "INFO logger 2023-08-24 11:47:19,275 | train_utils.py:96 | Epoch 41 [Train]: loss 0.06955793105589692, mse: 0.06675610691308975, rmse: 0.25837203198699693, mae 0.14644867181777954, r2: 0.9332086360034112, nrmse: -204.28996774753998\n",
      "INFO logger 2023-08-24 11:47:19,275 | train_utils.py:98 | Epoch 41 [Test]: loss 0.0004504581009261092, mse: 0.05691753327846527, rmse: 0.23857395767028988, mae 0.16549023985862732, r2: 0.9384926063058686, nrmse: -8.824273312714373\n",
      "INFO logger 2023-08-24 11:47:19,275 | helpers.py:147 | Validation loss decreased (0.000451 --> 0.000450). Caching model ...\n",
      "INFO logger 2023-08-24 11:47:20,080 | train_utils.py:96 | Epoch 42 [Train]: loss 0.07100599733530544, mse: 0.06830117106437683, rmse: 0.26134492737448883, mae 0.15394899249076843, r2: 0.9316627577640348, nrmse: -206.64058092403928\n",
      "INFO logger 2023-08-24 11:47:20,081 | train_utils.py:98 | Epoch 42 [Test]: loss 0.0004672719263716748, mse: 0.05894380807876587, rmse: 0.24278345923634473, mae 0.17504170536994934, r2: 0.9363029361117498, nrmse: -8.979972588074942\n",
      "INFO logger 2023-08-24 11:47:20,081 | helpers.py:135 | EarlyStopping counter: 1 out of 50\n",
      "INFO logger 2023-08-24 11:47:20,947 | train_utils.py:96 | Epoch 43 [Train]: loss 0.06750593015021877, mse: 0.06659874320030212, rmse: 0.25806732299983687, mae 0.14689667522907257, r2: 0.9333660857874525, nrmse: -204.04904000981003\n",
      "INFO logger 2023-08-24 11:47:20,948 | train_utils.py:98 | Epoch 43 [Test]: loss 0.0004559679773815891, mse: 0.05762748420238495, rmse: 0.24005725192625393, mae 0.1666506826877594, r2: 0.9377254021623699, nrmse: -8.879136777468121\n",
      "INFO logger 2023-08-24 11:47:20,948 | helpers.py:135 | EarlyStopping counter: 2 out of 50\n",
      "INFO logger 2023-08-24 11:47:21,773 | train_utils.py:96 | Epoch 44 [Train]: loss 0.07004034855344798, mse: 0.06792142987251282, rmse: 0.2606174013233054, mae 0.15289784967899323, r2: 0.9320427058689233, nrmse: -206.06533958545958\n",
      "INFO logger 2023-08-24 11:47:21,774 | train_utils.py:98 | Epoch 44 [Test]: loss 0.0004662644728059657, mse: 0.05884833633899689, rmse: 0.24258676043633726, mae 0.17337188124656677, r2: 0.9364061046694027, nrmse: -8.972697175500585\n",
      "INFO logger 2023-08-24 11:47:21,774 | helpers.py:135 | EarlyStopping counter: 3 out of 50\n",
      "INFO logger 2023-08-24 11:47:22,542 | train_utils.py:96 | Epoch 45 [Train]: loss 0.06675582272873726, mse: 0.0660814419388771, rmse: 0.2570631088641019, mae 0.14553676545619965, r2: 0.9338836662937726, nrmse: -203.25502654084738\n",
      "INFO logger 2023-08-24 11:47:22,542 | train_utils.py:98 | Epoch 45 [Test]: loss 0.0004578830772324612, mse: 0.057840313762426376, rmse: 0.24050013256218047, mae 0.16743263602256775, r2: 0.9374954188236826, nrmse: -8.895517860359513\n",
      "INFO logger 2023-08-24 11:47:22,543 | helpers.py:135 | EarlyStopping counter: 4 out of 50\n",
      "INFO logger 2023-08-24 11:47:23,469 | train_utils.py:96 | Epoch 46 [Train]: loss 0.06986233913630713, mse: 0.06710359454154968, rmse: 0.2590436151337255, mae 0.15303196012973785, r2: 0.9328609750635801, nrmse: -204.82097607042175\n",
      "INFO logger 2023-08-24 11:47:23,469 | train_utils.py:98 | Epoch 46 [Test]: loss 0.0004719010940943545, mse: 0.05951276794075966, rmse: 0.24395238867606864, mae 0.17548111081123352, r2: 0.9356880975970169, nrmse: -9.02320845908169\n",
      "INFO logger 2023-08-24 11:47:23,470 | helpers.py:135 | EarlyStopping counter: 5 out of 50\n",
      "INFO logger 2023-08-24 11:47:24,309 | train_utils.py:96 | Epoch 47 [Train]: loss 0.06582195988448802, mse: 0.0653906762599945, rmse: 0.2557160070468693, mae 0.1461184173822403, r2: 0.9345747983948616, nrmse: -202.18989814951692\n",
      "INFO logger 2023-08-24 11:47:24,310 | train_utils.py:98 | Epoch 47 [Test]: loss 0.00046145014508425843, mse: 0.058298636227846146, rmse: 0.24145110525289826, mae 0.1691373884677887, r2: 0.9370001376939668, nrmse: -8.930692038705578\n",
      "INFO logger 2023-08-24 11:47:24,310 | helpers.py:135 | EarlyStopping counter: 6 out of 50\n",
      "INFO logger 2023-08-24 11:47:25,173 | train_utils.py:96 | Epoch 48 [Train]: loss 0.06920800083025824, mse: 0.06679704785346985, rmse: 0.25845124850437434, mae 0.15124979615211487, r2: 0.9331676795757493, nrmse: -204.35260277678694\n",
      "INFO logger 2023-08-24 11:47:25,173 | train_utils.py:98 | Epoch 48 [Test]: loss 0.00046399152287614274, mse: 0.058618247509002686, rmse: 0.24211205568703656, mae 0.17328837513923645, r2: 0.9366547495110119, nrmse: -8.955138995674176\n",
      "INFO logger 2023-08-24 11:47:25,174 | helpers.py:135 | EarlyStopping counter: 7 out of 50\n",
      "INFO logger 2023-08-24 11:47:25,997 | train_utils.py:96 | Epoch 49 [Train]: loss 0.06575087161763804, mse: 0.06474706530570984, rmse: 0.2544544464255043, mae 0.14379282295703888, r2: 0.9352187472415288, nrmse: -201.1924055932669\n",
      "INFO logger 2023-08-24 11:47:25,998 | train_utils.py:98 | Epoch 49 [Test]: loss 0.00046024224693663637, mse: 0.05809466540813446, rmse: 0.2410283498017079, mae 0.1674465537071228, r2: 0.9372205508057496, nrmse: -8.915055337691886\n",
      "INFO logger 2023-08-24 11:47:25,998 | helpers.py:135 | EarlyStopping counter: 8 out of 50\n",
      "INFO logger 2023-08-24 11:47:26,854 | train_utils.py:96 | Epoch 50 [Train]: loss 0.07006770549924113, mse: 0.06668634712696075, rmse: 0.25823699798239746, mae 0.153265118598938, r2: 0.9332784378579426, nrmse: -204.18319886767208\n",
      "INFO logger 2023-08-24 11:47:26,855 | train_utils.py:98 | Epoch 50 [Test]: loss 0.00046504973842386615, mse: 0.05865582451224327, rmse: 0.24218964575770632, mae 0.1750819981098175, r2: 0.936614142092311, nrmse: -8.958008864609699\n",
      "INFO logger 2023-08-24 11:47:26,856 | helpers.py:135 | EarlyStopping counter: 9 out of 50\n",
      "INFO logger 2023-08-24 11:47:27,712 | train_utils.py:96 | Epoch 51 [Train]: loss 0.06535287152655656, mse: 0.06528221815824509, rmse: 0.2555038515526627, mae 0.1451733112335205, r2: 0.9346833092293272, nrmse: -202.02215073995552\n",
      "INFO logger 2023-08-24 11:47:27,712 | train_utils.py:98 | Epoch 51 [Test]: loss 0.00046023751298586527, mse: 0.058166541159152985, rmse: 0.24117740598810863, mae 0.16815504431724548, r2: 0.9371428809726501, nrmse: -8.920568565290553\n",
      "INFO logger 2023-08-24 11:47:27,713 | helpers.py:135 | EarlyStopping counter: 10 out of 50\n",
      "INFO logger 2023-08-24 11:47:28,522 | train_utils.py:96 | Epoch 52 [Train]: loss 0.06937478527834173, mse: 0.06495508551597595, rmse: 0.2548628759077633, mae 0.1507965475320816, r2: 0.9350106092406639, nrmse: -201.5153432003916\n",
      "INFO logger 2023-08-24 11:47:28,523 | train_utils.py:98 | Epoch 52 [Test]: loss 0.00046631775099283075, mse: 0.05879214406013489, rmse: 0.2424709138435678, mae 0.17511004209518433, r2: 0.9364668250627348, nrmse: -8.968412290398593\n",
      "INFO logger 2023-08-24 11:47:28,523 | helpers.py:135 | EarlyStopping counter: 11 out of 50\n",
      "INFO logger 2023-08-24 11:47:29,335 | train_utils.py:96 | Epoch 53 [Train]: loss 0.06461627558746841, mse: 0.06417825818061829, rmse: 0.25333428149506, mae 0.14480987191200256, r2: 0.9357878543816243, nrmse: -200.30671198412304\n",
      "INFO logger 2023-08-24 11:47:29,335 | train_utils.py:98 | Epoch 53 [Test]: loss 0.0004645917564630508, mse: 0.058674897998571396, rmse: 0.24222901972837896, mae 0.17085805535316467, r2: 0.9365935354400177, nrmse: -8.959465212494502\n",
      "INFO logger 2023-08-24 11:47:29,336 | helpers.py:135 | EarlyStopping counter: 12 out of 50\n",
      "INFO logger 2023-08-24 11:47:30,168 | train_utils.py:96 | Epoch 54 [Train]: loss 0.06745015653723385, mse: 0.06452234089374542, rmse: 0.25401248176761987, mae 0.14847378432750702, r2: 0.9354435819150518, nrmse: -200.84295234552022\n",
      "INFO logger 2023-08-24 11:47:30,168 | train_utils.py:98 | Epoch 54 [Test]: loss 0.0004554567520904262, mse: 0.05743096396327019, rmse: 0.23964758284462245, mae 0.17226561903953552, r2: 0.9379377788536026, nrmse: -8.863984109593613\n",
      "INFO logger 2023-08-24 11:47:30,169 | helpers.py:135 | EarlyStopping counter: 13 out of 50\n",
      "INFO logger 2023-08-24 11:47:31,049 | train_utils.py:96 | Epoch 55 [Train]: loss 0.06470124075713102, mse: 0.06368941813707352, rmse: 0.2523676249780734, mae 0.1446501761674881, r2: 0.9362769494573532, nrmse: -199.54239462686328\n",
      "INFO logger 2023-08-24 11:47:31,049 | train_utils.py:98 | Epoch 55 [Test]: loss 0.00046030430460882463, mse: 0.05808670446276665, rmse: 0.24101183469441215, mae 0.1703040450811386, r2: 0.9372291534654659, nrmse: -8.914444483841827\n",
      "INFO logger 2023-08-24 11:47:31,050 | helpers.py:135 | EarlyStopping counter: 14 out of 50\n",
      "INFO logger 2023-08-24 11:47:31,944 | train_utils.py:96 | Epoch 56 [Train]: loss 0.06625428091501817, mse: 0.06456205248832703, rmse: 0.2540906383327159, mae 0.14874620735645294, r2: 0.9354038596502993, nrmse: -200.9047492901815\n",
      "INFO logger 2023-08-24 11:47:31,945 | train_utils.py:98 | Epoch 56 [Test]: loss 0.00045581967161412824, mse: 0.05755208432674408, rmse: 0.23990015491188013, mae 0.17227967083454132, r2: 0.9378068929230382, nrmse: -8.873326139102632\n",
      "INFO logger 2023-08-24 11:47:31,945 | helpers.py:135 | EarlyStopping counter: 15 out of 50\n",
      "INFO logger 2023-08-24 11:47:32,803 | train_utils.py:96 | Epoch 57 [Train]: loss 0.06486640353978146, mse: 0.06372945010662079, rmse: 0.25244692532613816, mae 0.1439208984375, r2: 0.9362368955743751, nrmse: -199.605095939478\n",
      "INFO logger 2023-08-24 11:47:32,804 | train_utils.py:98 | Epoch 57 [Test]: loss 0.00045827636767549126, mse: 0.05786241963505745, rmse: 0.2405460863016845, mae 0.16923053562641144, r2: 0.9374715254303029, nrmse: -8.897217577969451\n",
      "INFO logger 2023-08-24 11:47:32,804 | helpers.py:135 | EarlyStopping counter: 16 out of 50\n",
      "INFO logger 2023-08-24 11:47:33,661 | train_utils.py:96 | Epoch 58 [Train]: loss 0.06718514654494356, mse: 0.06356778740882874, rmse: 0.2521265305532695, mae 0.14714570343494415, r2: 0.9363986449201716, nrmse: -199.35176574227185\n",
      "INFO logger 2023-08-24 11:47:33,661 | train_utils.py:98 | Epoch 58 [Test]: loss 0.00045303844176886374, mse: 0.05715305358171463, rmse: 0.2390670482975741, mae 0.17221401631832123, r2: 0.9382380973321952, nrmse: -8.842511541671062\n",
      "INFO logger 2023-08-24 11:47:33,662 | helpers.py:135 | EarlyStopping counter: 17 out of 50\n",
      "INFO logger 2023-08-24 11:47:34,572 | train_utils.py:96 | Epoch 59 [Train]: loss 0.06377550700563006, mse: 0.0637478455901146, rmse: 0.252483357055697, mae 0.14341728389263153, r2: 0.9362184938941229, nrmse: -199.63390183150636\n",
      "INFO logger 2023-08-24 11:47:34,573 | train_utils.py:98 | Epoch 59 [Test]: loss 0.00045861950612556166, mse: 0.057914797216653824, rmse: 0.24065493391296555, mae 0.1682550013065338, r2: 0.9374149260484494, nrmse: -8.901243587684677\n",
      "INFO logger 2023-08-24 11:47:34,573 | helpers.py:135 | EarlyStopping counter: 18 out of 50\n",
      "INFO logger 2023-08-24 11:47:35,529 | train_utils.py:96 | Epoch 60 [Train]: loss 0.06615541268547531, mse: 0.0622345469892025, rmse: 0.24946852905567568, mae 0.14511120319366455, r2: 0.9377325920466644, nrmse: -197.25013331696474\n",
      "INFO logger 2023-08-24 11:47:35,530 | train_utils.py:98 | Epoch 60 [Test]: loss 0.0004434090608741805, mse: 0.055884361267089844, rmse: 0.23639873364104522, mae 0.17023395001888275, r2: 0.9396090986639494, nrmse: -8.743817040211379\n",
      "INFO logger 2023-08-24 11:47:35,531 | helpers.py:147 | Validation loss decreased (0.000450 --> 0.000443). Caching model ...\n",
      "INFO logger 2023-08-24 11:47:36,407 | train_utils.py:96 | Epoch 61 [Train]: loss 0.06186901358887553, mse: 0.06213260814547539, rmse: 0.2492641332913249, mae 0.1421031504869461, r2: 0.9378345856356023, nrmse: -197.08852138170292\n",
      "INFO logger 2023-08-24 11:47:36,408 | train_utils.py:98 | Epoch 61 [Test]: loss 0.0004493067552994566, mse: 0.05676206201314926, rmse: 0.23824790033313883, mae 0.1680188924074173, r2: 0.9386606172432495, nrmse: -8.812213240916373\n",
      "INFO logger 2023-08-24 11:47:36,408 | helpers.py:135 | EarlyStopping counter: 1 out of 50\n",
      "INFO logger 2023-08-24 11:47:37,200 | train_utils.py:96 | Epoch 62 [Train]: loss 0.06417750212131068, mse: 0.06218601390719414, rmse: 0.24937123712889211, mae 0.1435997486114502, r2: 0.9377811434732017, nrmse: -197.173206397159\n",
      "INFO logger 2023-08-24 11:47:37,201 | train_utils.py:98 | Epoch 62 [Test]: loss 0.00044520744585502915, mse: 0.05609140545129776, rmse: 0.23683624184507268, mae 0.16950896382331848, r2: 0.9393853513846223, nrmse: -8.759999409848842\n",
      "INFO logger 2023-08-24 11:47:37,201 | helpers.py:135 | EarlyStopping counter: 2 out of 50\n",
      "INFO logger 2023-08-24 11:47:38,009 | train_utils.py:96 | Epoch 63 [Train]: loss 0.06238440400920808, mse: 0.061861805617809296, rmse: 0.24872033615651393, mae 0.1415012776851654, r2: 0.9381055279268306, nrmse: -196.65855108547004\n",
      "INFO logger 2023-08-24 11:47:38,010 | train_utils.py:98 | Epoch 63 [Test]: loss 0.0004542074226147947, mse: 0.057404134422540665, rmse: 0.23959159923198614, mae 0.1677185744047165, r2: 0.9379667694260895, nrmse: -8.861913411250173\n",
      "INFO logger 2023-08-24 11:47:38,010 | helpers.py:135 | EarlyStopping counter: 3 out of 50\n",
      "INFO logger 2023-08-24 11:47:38,829 | train_utils.py:96 | Epoch 64 [Train]: loss 0.06457801163196564, mse: 0.06117165461182594, rmse: 0.24732904118163307, mae 0.1423150599002838, r2: 0.9387960481600833, nrmse: -195.55847998504987\n",
      "INFO logger 2023-08-24 11:47:38,830 | train_utils.py:98 | Epoch 64 [Test]: loss 0.00043297618932542747, mse: 0.054601479321718216, rmse: 0.23366959434577322, mae 0.16663339734077454, r2: 0.9409954340604187, nrmse: -8.64287278256851\n",
      "INFO logger 2023-08-24 11:47:38,830 | helpers.py:147 | Validation loss decreased (0.000443 --> 0.000433). Caching model ...\n",
      "INFO logger 2023-08-24 11:47:39,644 | train_utils.py:96 | Epoch 65 [Train]: loss 0.061470303175156005, mse: 0.06083489954471588, rmse: 0.2466473181380975, mae 0.14064159989356995, r2: 0.9391329825707817, nrmse: -195.01945423405976\n",
      "INFO logger 2023-08-24 11:47:39,645 | train_utils.py:98 | Epoch 65 [Test]: loss 0.00044300978164575253, mse: 0.0559038482606411, rmse: 0.23643994641481608, mae 0.16722333431243896, r2: 0.9395880435967988, nrmse: -8.745341400972631\n",
      "INFO logger 2023-08-24 11:47:39,645 | helpers.py:135 | EarlyStopping counter: 1 out of 50\n",
      "INFO logger 2023-08-24 11:47:40,446 | train_utils.py:96 | Epoch 66 [Train]: loss 0.06231882190331817, mse: 0.06019791588187218, rmse: 0.24535263577526975, mae 0.14094100892543793, r2: 0.9397702974144014, nrmse: -193.9957729319028\n",
      "INFO logger 2023-08-24 11:47:40,446 | train_utils.py:98 | Epoch 66 [Test]: loss 0.0004334965165246997, mse: 0.05470338463783264, rmse: 0.23388754699178116, mae 0.16692861914634705, r2: 0.9408853075229485, nrmse: -8.650934323469219\n",
      "INFO logger 2023-08-24 11:47:40,447 | helpers.py:135 | EarlyStopping counter: 2 out of 50\n",
      "INFO logger 2023-08-24 11:47:41,367 | train_utils.py:96 | Epoch 67 [Train]: loss 0.060391570659703575, mse: 0.059717126190662384, rmse: 0.2443708783604593, mae 0.14022532105445862, r2: 0.9402513434167743, nrmse: -193.21951557515595\n",
      "INFO logger 2023-08-24 11:47:41,367 | train_utils.py:98 | Epoch 67 [Test]: loss 0.00043971815870867833, mse: 0.05549341067671776, rmse: 0.235570394312863, mae 0.16789336502552032, r2: 0.9400315758052218, nrmse: -8.713178773156047\n",
      "INFO logger 2023-08-24 11:47:41,368 | helpers.py:135 | EarlyStopping counter: 3 out of 50\n",
      "INFO logger 2023-08-24 11:47:42,220 | train_utils.py:96 | Epoch 68 [Train]: loss 0.061004785922705196, mse: 0.05791561305522919, rmse: 0.24065662894512005, mae 0.13988003134727478, r2: 0.9420538090110285, nrmse: -190.28272753571304\n",
      "INFO logger 2023-08-24 11:47:42,221 | train_utils.py:98 | Epoch 68 [Test]: loss 0.00043523682183340973, mse: 0.0549096018075943, rmse: 0.2343279791394837, mae 0.16939900815486908, r2: 0.9406624584524191, nrmse: -8.667224842706881\n",
      "INFO logger 2023-08-24 11:47:42,221 | helpers.py:135 | EarlyStopping counter: 4 out of 50\n",
      "INFO logger 2023-08-24 11:47:43,173 | train_utils.py:96 | Epoch 69 [Train]: loss 0.058905529498588294, mse: 0.05806310474872589, rmse: 0.24096287006243491, mae 0.13941176235675812, r2: 0.9419062319931701, nrmse: -190.52486670030487\n",
      "INFO logger 2023-08-24 11:47:43,174 | train_utils.py:98 | Epoch 69 [Test]: loss 0.0004350600448268199, mse: 0.054882943630218506, rmse: 0.2342710900436042, mae 0.16867461800575256, r2: 0.9406912743519249, nrmse: -8.665120652729668\n",
      "INFO logger 2023-08-24 11:47:43,174 | helpers.py:135 | EarlyStopping counter: 5 out of 50\n",
      "INFO logger 2023-08-24 11:47:44,026 | train_utils.py:96 | Epoch 70 [Train]: loss 0.058507455803919584, mse: 0.05715272203087807, rmse: 0.2390663548701031, mae 0.13885052502155304, r2: 0.9428170987658839, nrmse: -189.02532735583867\n",
      "INFO logger 2023-08-24 11:47:44,027 | train_utils.py:98 | Epoch 70 [Test]: loss 0.00043667981411978513, mse: 0.055110249668359756, rmse: 0.2347557233985143, mae 0.16881857812404633, r2: 0.9404456359883505, nrmse: -8.68304606764899\n",
      "INFO logger 2023-08-24 11:47:44,027 | helpers.py:135 | EarlyStopping counter: 6 out of 50\n",
      "INFO logger 2023-08-24 11:47:44,900 | train_utils.py:96 | Epoch 71 [Train]: loss 0.05841049303126056, mse: 0.05781317874789238, rmse: 0.24044371222365615, mae 0.13843877613544464, r2: 0.942156291544553, nrmse: -190.11437823789524\n",
      "INFO logger 2023-08-24 11:47:44,901 | train_utils.py:98 | Epoch 71 [Test]: loss 0.0004246406801785642, mse: 0.053603269159793854, rmse: 0.2315237982579628, mae 0.16582053899765015, r2: 0.9420741395224487, nrmse: -8.563504978399536\n",
      "INFO logger 2023-08-24 11:47:44,902 | helpers.py:147 | Validation loss decreased (0.000433 --> 0.000425). Caching model ...\n",
      "INFO logger 2023-08-24 11:47:45,757 | train_utils.py:96 | Epoch 72 [Train]: loss 0.05814554980315734, mse: 0.05741708725690842, rmse: 0.23961862877687207, mae 0.13909418880939484, r2: 0.9425525957782581, nrmse: -189.46200007824584\n",
      "INFO logger 2023-08-24 11:47:45,757 | train_utils.py:98 | Epoch 72 [Test]: loss 0.0004366906370684417, mse: 0.05506249889731407, rmse: 0.23465399825554661, mae 0.16810373961925507, r2: 0.9404972317825464, nrmse: -8.679283500799333\n",
      "INFO logger 2023-08-24 11:47:45,758 | helpers.py:135 | EarlyStopping counter: 1 out of 50\n",
      "INFO logger 2023-08-24 11:47:46,600 | train_utils.py:96 | Epoch 73 [Train]: loss 0.05847651997464709, mse: 0.05500827357172966, rmse: 0.23453842664205296, mae 0.13934366405010223, r2: 0.9449626747765257, nrmse: -185.44517858912502\n",
      "INFO logger 2023-08-24 11:47:46,600 | train_utils.py:98 | Epoch 73 [Test]: loss 0.00043374742680822896, mse: 0.0547553151845932, rmse: 0.23399853671464102, mae 0.17060203850269318, r2: 0.940829191255755, nrmse: -8.655039564707542\n",
      "INFO logger 2023-08-24 11:47:46,601 | helpers.py:135 | EarlyStopping counter: 2 out of 50\n",
      "INFO logger 2023-08-24 11:47:47,357 | train_utils.py:96 | Epoch 74 [Train]: loss 0.05597847659373656, mse: 0.05520628020167351, rmse: 0.23496016726601449, mae 0.13889729976654053, r2: 0.9447645701741525, nrmse: -185.77864106880727\n",
      "INFO logger 2023-08-24 11:47:47,357 | train_utils.py:98 | Epoch 74 [Test]: loss 0.00043358874251270854, mse: 0.05463571473956108, rmse: 0.23374283890541134, mae 0.1692029982805252, r2: 0.9409584339421437, nrmse: -8.645581921567702\n",
      "INFO logger 2023-08-24 11:47:47,358 | helpers.py:135 | EarlyStopping counter: 3 out of 50\n",
      "INFO logger 2023-08-24 11:47:48,178 | train_utils.py:96 | Epoch 75 [Train]: loss 0.055766165409295354, mse: 0.05358514562249184, rmse: 0.23148465526356565, mae 0.13769030570983887, r2: 0.9463865612132537, nrmse: -183.0306182684053\n",
      "INFO logger 2023-08-24 11:47:48,178 | train_utils.py:98 | Epoch 75 [Test]: loss 0.0004329812282707259, mse: 0.05463040992617607, rmse: 0.23373149108790642, mae 0.1700509935617447, r2: 0.9409641717718241, nrmse: -8.645162193261461\n",
      "INFO logger 2023-08-24 11:47:48,179 | helpers.py:135 | EarlyStopping counter: 4 out of 50\n",
      "INFO logger 2023-08-24 11:47:48,998 | train_utils.py:96 | Epoch 76 [Train]: loss 0.055103149337810464, mse: 0.05381328985095024, rmse: 0.23197691663385442, mae 0.137117400765419, r2: 0.9461582956328728, nrmse: -183.4198401926448\n",
      "INFO logger 2023-08-24 11:47:48,999 | train_utils.py:98 | Epoch 76 [Test]: loss 0.0004282035826765306, mse: 0.05405839905142784, rmse: 0.2325046215700407, mae 0.16847848892211914, r2: 0.9415823048895101, nrmse: -8.599783258987138\n",
      "INFO logger 2023-08-24 11:47:48,999 | helpers.py:135 | EarlyStopping counter: 5 out of 50\n",
      "INFO logger 2023-08-24 11:47:49,752 | train_utils.py:96 | Epoch 77 [Train]: loss 0.05436433960858267, mse: 0.052701786160469055, rmse: 0.22956869595062185, mae 0.1370299905538559, r2: 0.947270387034667, nrmse: -181.5157048188467\n",
      "INFO logger 2023-08-24 11:47:49,753 | train_utils.py:98 | Epoch 77 [Test]: loss 0.00042351078002424966, mse: 0.053391966968774796, rmse: 0.23106701834916812, mae 0.16785427927970886, r2: 0.9423024828490054, nrmse: -8.546609795042889\n",
      "INFO logger 2023-08-24 11:47:49,753 | helpers.py:147 | Validation loss decreased (0.000425 --> 0.000424). Caching model ...\n",
      "INFO logger 2023-08-24 11:47:50,554 | train_utils.py:96 | Epoch 78 [Train]: loss 0.05389947461662814, mse: 0.05264563485980034, rmse: 0.22944636597645285, mae 0.1364278495311737, r2: 0.9473265641488283, nrmse: -181.4189807799275\n",
      "INFO logger 2023-08-24 11:47:50,554 | train_utils.py:98 | Epoch 78 [Test]: loss 0.000434509194210956, mse: 0.054783377796411514, rmse: 0.23405849225441813, mae 0.16954855620861053, r2: 0.9407988627178479, nrmse: -8.657257175023316\n",
      "INFO logger 2023-08-24 11:47:50,555 | helpers.py:135 | EarlyStopping counter: 1 out of 50\n",
      "INFO logger 2023-08-24 11:47:51,417 | train_utils.py:96 | Epoch 79 [Train]: loss 0.05292418027238455, mse: 0.05116940662264824, rmse: 0.22620655742627852, mae 0.1356353759765625, r2: 0.9488035717783765, nrmse: -178.8573243222478\n",
      "INFO logger 2023-08-24 11:47:51,418 | train_utils.py:98 | Epoch 79 [Test]: loss 0.0004301117918296167, mse: 0.054259128868579865, rmse: 0.23293589003968423, mae 0.16878056526184082, r2: 0.9413653894388975, nrmse: -8.615734835950747\n",
      "INFO logger 2023-08-24 11:47:51,418 | helpers.py:135 | EarlyStopping counter: 2 out of 50\n",
      "INFO logger 2023-08-24 11:47:52,337 | train_utils.py:96 | Epoch 80 [Train]: loss 0.05298546941776294, mse: 0.05041712522506714, rmse: 0.22453758087471046, mae 0.13506780564785004, r2: 0.9495562490077701, nrmse: -177.53769555566208\n",
      "INFO logger 2023-08-24 11:47:52,338 | train_utils.py:98 | Epoch 80 [Test]: loss 0.00041442934358329106, mse: 0.05222317576408386, rmse: 0.2285239063294776, mae 0.16698290407657623, r2: 0.9435655216883763, nrmse: -8.452546236112404\n",
      "INFO logger 2023-08-24 11:47:52,338 | helpers.py:147 | Validation loss decreased (0.000424 --> 0.000414). Caching model ...\n",
      "INFO logger 2023-08-24 11:47:53,261 | train_utils.py:96 | Epoch 81 [Train]: loss 0.05116215960151749, mse: 0.04945862665772438, rmse: 0.2223929555038207, mae 0.13538527488708496, r2: 0.9505152588119584, nrmse: -175.84198010039304\n",
      "INFO logger 2023-08-24 11:47:53,261 | train_utils.py:98 | Epoch 81 [Test]: loss 0.00041416387634667735, mse: 0.05228839069604874, rmse: 0.22866654914098988, mae 0.16614876687526703, r2: 0.9434950505927897, nrmse: -8.457822248495187\n",
      "INFO logger 2023-08-24 11:47:53,261 | helpers.py:147 | Validation loss decreased (0.000414 --> 0.000414). Caching model ...\n",
      "INFO logger 2023-08-24 11:47:54,126 | train_utils.py:96 | Epoch 82 [Train]: loss 0.04997818723495584, mse: 0.049230434000492096, rmse: 0.22187932305758484, mae 0.13341163098812103, r2: 0.9507435681542232, nrmse: -175.43586046326104\n",
      "INFO logger 2023-08-24 11:47:54,127 | train_utils.py:98 | Epoch 82 [Test]: loss 0.00040439445285769235, mse: 0.05098956823348999, rmse: 0.2258086983122882, mae 0.16274382174015045, r2: 0.9448986084436038, nrmse: -8.352117262730216\n",
      "INFO logger 2023-08-24 11:47:54,128 | helpers.py:147 | Validation loss decreased (0.000414 --> 0.000404). Caching model ...\n",
      "INFO logger 2023-08-24 11:47:54,893 | train_utils.py:96 | Epoch 83 [Train]: loss 0.049847215253976174, mse: 0.04879593476653099, rmse: 0.2208980189284888, mae 0.13164672255516052, r2: 0.9511782984048411, nrmse: -174.6599615111111\n",
      "INFO logger 2023-08-24 11:47:54,894 | train_utils.py:98 | Epoch 83 [Test]: loss 0.000399934066927921, mse: 0.05048345401883125, rmse: 0.22468523320154188, mae 0.1611509770154953, r2: 0.9454455383341797, nrmse: -8.310563007222473\n",
      "INFO logger 2023-08-24 11:47:54,894 | helpers.py:147 | Validation loss decreased (0.000404 --> 0.000400). Caching model ...\n",
      "INFO logger 2023-08-24 11:47:55,885 | train_utils.py:96 | Epoch 84 [Train]: loss 0.0500800793961389, mse: 0.04900866746902466, rmse: 0.2213790131630021, mae 0.13178105652332306, r2: 0.9509654516760595, nrmse: -175.04027472032263\n",
      "INFO logger 2023-08-24 11:47:55,886 | train_utils.py:98 | Epoch 84 [Test]: loss 0.00039138818889507775, mse: 0.049380019307136536, rmse: 0.22221615446932866, mae 0.15827707946300507, r2: 0.9466379515336371, nrmse: -8.219237760425123\n",
      "INFO logger 2023-08-24 11:47:55,886 | helpers.py:147 | Validation loss decreased (0.000400 --> 0.000391). Caching model ...\n",
      "INFO logger 2023-08-24 11:47:56,815 | train_utils.py:96 | Epoch 85 [Train]: loss 0.04919256237189984, mse: 0.04800400137901306, rmse: 0.21909815466820587, mae 0.13073351979255676, r2: 0.9519706490997245, nrmse: -173.23684226381712\n",
      "INFO logger 2023-08-24 11:47:56,816 | train_utils.py:98 | Epoch 85 [Test]: loss 0.0004004759528832129, mse: 0.05058041214942932, rmse: 0.2249008940609826, mae 0.15984667837619781, r2: 0.9453407583620373, nrmse: -8.3185397804845\n",
      "INFO logger 2023-08-24 11:47:56,816 | helpers.py:135 | EarlyStopping counter: 1 out of 50\n",
      "INFO logger 2023-08-24 11:47:57,635 | train_utils.py:96 | Epoch 86 [Train]: loss 0.049068845619331114, mse: 0.04665881395339966, rmse: 0.21600651368280463, mae 0.13117405772209167, r2: 0.9533165502793837, nrmse: -170.79233914814563\n",
      "INFO logger 2023-08-24 11:47:57,635 | train_utils.py:98 | Epoch 86 [Test]: loss 0.0003954710177423661, mse: 0.049912337213754654, rmse: 0.22341069180716186, mae 0.15996873378753662, r2: 0.9460627088650521, nrmse: -8.263420805608328\n",
      "INFO logger 2023-08-24 11:47:57,636 | helpers.py:135 | EarlyStopping counter: 2 out of 50\n",
      "INFO logger 2023-08-24 11:47:58,562 | train_utils.py:96 | Epoch 87 [Train]: loss 0.0474291469072341, mse: 0.04661722108721733, rmse: 0.21591021533780502, mae 0.13217972218990326, r2: 0.9533581644340943, nrmse: -170.71619783500574\n",
      "INFO logger 2023-08-24 11:47:58,563 | train_utils.py:98 | Epoch 87 [Test]: loss 0.00039729077303618716, mse: 0.050139401108026505, rmse: 0.22391829114216308, mae 0.1601545363664627, r2: 0.9458173356797723, nrmse: -8.282195676550417\n",
      "INFO logger 2023-08-24 11:47:58,563 | helpers.py:135 | EarlyStopping counter: 3 out of 50\n",
      "INFO logger 2023-08-24 11:47:59,523 | train_utils.py:96 | Epoch 88 [Train]: loss 0.04705263350479072, mse: 0.04621206596493721, rmse: 0.21496991874431456, mae 0.13040247559547424, r2: 0.9537635329075979, nrmse: -169.97272277975304\n",
      "INFO logger 2023-08-24 11:47:59,524 | train_utils.py:98 | Epoch 88 [Test]: loss 0.00038945532202372076, mse: 0.04912671446800232, rmse: 0.22164547021764808, mae 0.1575726717710495, r2: 0.9469116802794698, nrmse: -8.198129531089167\n",
      "INFO logger 2023-08-24 11:47:59,524 | helpers.py:147 | Validation loss decreased (0.000391 --> 0.000389). Caching model ...\n",
      "INFO logger 2023-08-24 11:48:00,369 | train_utils.py:96 | Epoch 89 [Train]: loss 0.045813016629836056, mse: 0.04536701738834381, rmse: 0.21299534593118183, mae 0.1297195851802826, r2: 0.9546090294320597, nrmse: -168.41146472404228\n",
      "INFO logger 2023-08-24 11:48:00,369 | train_utils.py:98 | Epoch 89 [Test]: loss 0.0003985686739634352, mse: 0.05026036128401756, rmse: 0.22418822735375193, mae 0.15901759266853333, r2: 0.9456866199584429, nrmse: -8.292179963734648\n",
      "INFO logger 2023-08-24 11:48:00,370 | helpers.py:135 | EarlyStopping counter: 1 out of 50\n",
      "INFO logger 2023-08-24 11:48:01,282 | train_utils.py:96 | Epoch 90 [Train]: loss 0.046574189531384036, mse: 0.046156276017427444, rmse: 0.21484011733711988, mae 0.12913063168525696, r2: 0.9538193522194711, nrmse: -169.87009121748426\n",
      "INFO logger 2023-08-24 11:48:01,283 | train_utils.py:98 | Epoch 90 [Test]: loss 0.00038175515569092934, mse: 0.04812909662723541, rmse: 0.21938344656613318, mae 0.15564322471618652, r2: 0.9479897475948957, nrmse: -8.114462750625318\n",
      "INFO logger 2023-08-24 11:48:01,284 | helpers.py:147 | Validation loss decreased (0.000389 --> 0.000382). Caching model ...\n",
      "INFO logger 2023-08-24 11:48:02,103 | train_utils.py:96 | Epoch 91 [Train]: loss 0.04597354038560297, mse: 0.04558056220412254, rmse: 0.21349604727985608, mae 0.12928226590156555, r2: 0.9543953687746993, nrmse: -168.80735998246158\n",
      "INFO logger 2023-08-24 11:48:02,104 | train_utils.py:98 | Epoch 91 [Test]: loss 0.0003925372122062577, mse: 0.049533192068338394, rmse: 0.22256053573879264, mae 0.1580062061548233, r2: 0.946472427397484, nrmse: -8.231975590132965\n",
      "INFO logger 2023-08-24 11:48:02,105 | helpers.py:135 | EarlyStopping counter: 1 out of 50\n",
      "INFO logger 2023-08-24 11:48:02,973 | train_utils.py:96 | Epoch 92 [Train]: loss 0.046502804674673826, mse: 0.045666392892599106, rmse: 0.21369696509917754, mae 0.12935331463813782, r2: 0.9543094925242843, nrmse: -168.96622197117387\n",
      "INFO logger 2023-08-24 11:48:02,974 | train_utils.py:98 | Epoch 92 [Test]: loss 0.0003882628054036732, mse: 0.049053534865379333, rmse: 0.22148032613615895, mae 0.15641358494758606, r2: 0.9469907627458856, nrmse: -8.19202125118608\n",
      "INFO logger 2023-08-24 11:48:02,974 | helpers.py:135 | EarlyStopping counter: 2 out of 50\n",
      "INFO logger 2023-08-24 11:48:03,865 | train_utils.py:96 | Epoch 93 [Train]: loss 0.045992967614438385, mse: 0.04522986710071564, rmse: 0.21267314616734206, mae 0.130011647939682, r2: 0.9547462522167832, nrmse: -168.1567073539938\n",
      "INFO logger 2023-08-24 11:48:03,866 | train_utils.py:98 | Epoch 93 [Test]: loss 0.00039461018484935424, mse: 0.049797173589468, rmse: 0.22315280323013648, mae 0.15862208604812622, r2: 0.9461871572452487, nrmse: -8.253882131269682\n",
      "INFO logger 2023-08-24 11:48:03,867 | helpers.py:135 | EarlyStopping counter: 3 out of 50\n",
      "INFO logger 2023-08-24 11:48:04,684 | train_utils.py:96 | Epoch 94 [Train]: loss 0.046924613270675763, mse: 0.044492706656455994, rmse: 0.2109329435068311, mae 0.12934137880802155, r2: 0.9554838023416712, nrmse: -166.78076142572914\n",
      "INFO logger 2023-08-24 11:48:04,685 | train_utils.py:98 | Epoch 94 [Test]: loss 0.000386151323933699, mse: 0.048733439296483994, rmse: 0.22075651586416198, mae 0.15776804089546204, r2: 0.9473366689116501, nrmse: -8.16524926094447\n",
      "INFO logger 2023-08-24 11:48:04,686 | helpers.py:135 | EarlyStopping counter: 4 out of 50\n",
      "INFO logger 2023-08-24 11:48:05,458 | train_utils.py:96 | Epoch 95 [Train]: loss 0.04495750056230463, mse: 0.044420044869184494, rmse: 0.21076063405955225, mae 0.12978550791740417, r2: 0.9555564996664986, nrmse: -166.64451954553616\n",
      "INFO logger 2023-08-24 11:48:05,459 | train_utils.py:98 | Epoch 95 [Test]: loss 0.0003900295755232287, mse: 0.049186427146196365, rmse: 0.22178013244246286, mae 0.1591840535402298, r2: 0.9468471579243384, nrmse: -8.20311036088412\n",
      "INFO logger 2023-08-24 11:48:05,459 | helpers.py:135 | EarlyStopping counter: 5 out of 50\n",
      "INFO logger 2023-08-24 11:48:06,223 | train_utils.py:96 | Epoch 96 [Train]: loss 0.0450394941290142, mse: 0.044118043035268784, rmse: 0.21004295521456745, mae 0.12824444472789764, r2: 0.9558586565779918, nrmse: -166.077064210036\n",
      "INFO logger 2023-08-24 11:48:06,223 | train_utils.py:98 | Epoch 96 [Test]: loss 0.00038585117228372753, mse: 0.048693977296352386, rmse: 0.22066711874756598, mae 0.15671825408935547, r2: 0.9473793187376142, nrmse: -8.161942677954796\n",
      "INFO logger 2023-08-24 11:48:06,224 | helpers.py:135 | EarlyStopping counter: 6 out of 50\n",
      "INFO logger 2023-08-24 11:48:06,983 | train_utils.py:96 | Epoch 97 [Train]: loss 0.0445171135943383, mse: 0.04374014958739281, rmse: 0.20914145831803127, mae 0.12830793857574463, r2: 0.9562367533908271, nrmse: -165.3642673546583\n",
      "INFO logger 2023-08-24 11:48:06,984 | train_utils.py:98 | Epoch 97 [Test]: loss 0.00039481906470848106, mse: 0.04979337379336357, rmse: 0.22314428917936388, mae 0.15881988406181335, r2: 0.9461912647401595, nrmse: -8.253567217136768\n",
      "INFO logger 2023-08-24 11:48:06,985 | helpers.py:135 | EarlyStopping counter: 7 out of 50\n",
      "INFO logger 2023-08-24 11:48:07,745 | train_utils.py:96 | Epoch 98 [Train]: loss 0.04407221550354734, mse: 0.04357575625181198, rmse: 0.20874806885768304, mae 0.1271776407957077, r2: 0.95640123567383, nrmse: -165.0532216135666\n",
      "INFO logger 2023-08-24 11:48:07,745 | train_utils.py:98 | Epoch 98 [Test]: loss 0.0003818501865393237, mse: 0.04814263805747032, rmse: 0.2194143068659615, mae 0.1552087962627411, r2: 0.9479751148065791, nrmse: -8.115604198429835\n",
      "INFO logger 2023-08-24 11:48:07,746 | helpers.py:135 | EarlyStopping counter: 8 out of 50\n",
      "INFO logger 2023-08-24 11:48:08,487 | train_utils.py:96 | Epoch 99 [Train]: loss 0.04365793033502996, mse: 0.04292132705450058, rmse: 0.20717462936976763, mae 0.126405268907547, r2: 0.9570560076776531, nrmse: -163.8091322290967\n",
      "INFO logger 2023-08-24 11:48:08,487 | train_utils.py:98 | Epoch 99 [Test]: loss 0.0003895802564962566, mse: 0.04915899038314819, rmse: 0.22171826804110706, mae 0.1568792313337326, r2: 0.9468768067229858, nrmse: -8.200822146398256\n",
      "INFO logger 2023-08-24 11:48:08,488 | helpers.py:135 | EarlyStopping counter: 9 out of 50\n",
      "INFO logger 2023-08-24 11:48:09,276 | train_utils.py:96 | Epoch 100 [Train]: loss 0.04439852286304813, mse: 0.043262336403131485, rmse: 0.20799600093062243, mae 0.1268385499715805, r2: 0.956714819561211, nrmse: -164.45857546947116\n",
      "INFO logger 2023-08-24 11:48:09,277 | train_utils.py:98 | Epoch 100 [Test]: loss 0.000378758567031364, mse: 0.047734569758176804, rmse: 0.21848242436904805, mae 0.15615519881248474, r2: 0.9484160931046404, nrmse: -8.08113612015172\n",
      "INFO logger 2023-08-24 11:48:09,277 | helpers.py:147 | Validation loss decreased (0.000382 --> 0.000379). Caching model ...\n",
      "INFO logger 2023-08-24 11:48:10,192 | train_utils.py:96 | Epoch 101 [Train]: loss 0.04358567095914623, mse: 0.04319539666175842, rmse: 0.2078350227025234, mae 0.12849536538124084, r2: 0.9567817963081865, nrmse: -164.33129297386398\n",
      "INFO logger 2023-08-24 11:48:10,192 | train_utils.py:98 | Epoch 101 [Test]: loss 0.0003862286955989592, mse: 0.048719242215156555, rmse: 0.22072435800145973, mae 0.15780584514141083, r2: 0.9473520116061167, nrmse: -8.164059819429527\n",
      "INFO logger 2023-08-24 11:48:10,193 | helpers.py:135 | EarlyStopping counter: 1 out of 50\n",
      "INFO logger 2023-08-24 11:48:10,937 | train_utils.py:96 | Epoch 102 [Train]: loss 0.04357723303837702, mse: 0.04257199913263321, rmse: 0.20632983093249802, mae 0.12709428369998932, r2: 0.9574055198039603, nrmse: -163.14116579257592\n",
      "INFO logger 2023-08-24 11:48:10,937 | train_utils.py:98 | Epoch 102 [Test]: loss 0.00038615875599677104, mse: 0.04867169260978699, rmse: 0.2206166190697949, mae 0.1571517139673233, r2: 0.9474033995539061, nrmse: -8.160074817090152\n",
      "INFO logger 2023-08-24 11:48:10,938 | helpers.py:135 | EarlyStopping counter: 2 out of 50\n",
      "INFO logger 2023-08-24 11:48:11,746 | train_utils.py:96 | Epoch 103 [Train]: loss 0.043095441469631623, mse: 0.04235301911830902, rmse: 0.20579849153555285, mae 0.12654398381710052, r2: 0.9576246202408006, nrmse: -162.7210455983345\n",
      "INFO logger 2023-08-24 11:48:11,746 | train_utils.py:98 | Epoch 103 [Test]: loss 0.0003916487032384203, mse: 0.049375202506780624, rmse: 0.22220531610827995, mae 0.1582043617963791, r2: 0.9466431593085695, nrmse: -8.218836875680244\n",
      "INFO logger 2023-08-24 11:48:11,747 | helpers.py:135 | EarlyStopping counter: 3 out of 50\n",
      "INFO logger 2023-08-24 11:48:12,600 | train_utils.py:96 | Epoch 104 [Train]: loss 0.04279519598640036, mse: 0.04212258383631706, rmse: 0.20523787135009236, mae 0.12639352679252625, r2: 0.9578551745378214, nrmse: -162.27777362835556\n",
      "INFO logger 2023-08-24 11:48:12,601 | train_utils.py:98 | Epoch 104 [Test]: loss 0.00037986148824120127, mse: 0.047875676304101944, rmse: 0.21880511032446648, mae 0.15620805323123932, r2: 0.9482636047591843, nrmse: -8.093071492698634\n",
      "INFO logger 2023-08-24 11:48:12,601 | helpers.py:135 | EarlyStopping counter: 4 out of 50\n",
      "INFO logger 2023-08-24 11:48:13,412 | train_utils.py:96 | Epoch 105 [Train]: loss 0.04235796425200533, mse: 0.0419568233191967, rmse: 0.2048336479175155, mae 0.1255311518907547, r2: 0.9580210229781415, nrmse: -161.95816166660853\n",
      "INFO logger 2023-08-24 11:48:13,413 | train_utils.py:98 | Epoch 105 [Test]: loss 0.0003952430996891351, mse: 0.04987211152911186, rmse: 0.22332064734169088, mae 0.15726186335086823, r2: 0.9461061776269072, nrmse: -8.260090278750443\n",
      "INFO logger 2023-08-24 11:48:13,413 | helpers.py:135 | EarlyStopping counter: 5 out of 50\n",
      "INFO logger 2023-08-24 11:48:14,295 | train_utils.py:96 | Epoch 106 [Train]: loss 0.04298204732185695, mse: 0.04160723835229874, rmse: 0.2039785242428691, mae 0.1255650669336319, r2: 0.9583707929593892, nrmse: -161.28203125663265\n",
      "INFO logger 2023-08-24 11:48:14,295 | train_utils.py:98 | Epoch 106 [Test]: loss 0.00038034579122972766, mse: 0.04795137792825699, rmse: 0.2189780306977323, mae 0.15614035725593567, r2: 0.9481817982869738, nrmse: -8.099467398814856\n",
      "INFO logger 2023-08-24 11:48:14,296 | helpers.py:135 | EarlyStopping counter: 6 out of 50\n",
      "INFO logger 2023-08-24 11:48:15,209 | train_utils.py:96 | Epoch 107 [Train]: loss 0.04248900710808812, mse: 0.04214056581258774, rmse: 0.2052816743223509, mae 0.12721720337867737, r2: 0.9578371822613516, nrmse: -162.31240782510326\n",
      "INFO logger 2023-08-24 11:48:15,210 | train_utils.py:98 | Epoch 107 [Test]: loss 0.0003944543783950527, mse: 0.04975252225995064, rmse: 0.22305273425795666, mae 0.15956321358680725, r2: 0.9462354125376694, nrmse: -8.250180822169309\n",
      "INFO logger 2023-08-24 11:48:15,211 | helpers.py:135 | EarlyStopping counter: 7 out of 50\n",
      "INFO logger 2023-08-24 11:48:16,068 | train_utils.py:96 | Epoch 108 [Train]: loss 0.04255372704210458, mse: 0.04109537973999977, rmse: 0.2027199539759216, mae 0.12506450712680817, r2: 0.9588829196337895, nrmse: -160.28690311808995\n",
      "INFO logger 2023-08-24 11:48:16,069 | train_utils.py:98 | Epoch 108 [Test]: loss 0.00038407738293297805, mse: 0.04853981360793114, rmse: 0.22031752905279947, mae 0.15652616322040558, r2: 0.9475459106454968, nrmse: -8.149012201200115\n",
      "INFO logger 2023-08-24 11:48:16,069 | helpers.py:135 | EarlyStopping counter: 8 out of 50\n",
      "INFO logger 2023-08-24 11:48:16,920 | train_utils.py:96 | Epoch 109 [Train]: loss 0.0419790959203965, mse: 0.04095940664410591, rmse: 0.2023843043422733, mae 0.12487280368804932, r2: 0.9590189615788315, nrmse: -160.02151118574668\n",
      "INFO logger 2023-08-24 11:48:16,921 | train_utils.py:98 | Epoch 109 [Test]: loss 0.0003902476869131389, mse: 0.04917177930474281, rmse: 0.22174710664345276, mae 0.15850117802619934, r2: 0.9468629904026772, nrmse: -8.201888816505672\n",
      "INFO logger 2023-08-24 11:48:16,921 | helpers.py:135 | EarlyStopping counter: 9 out of 50\n",
      "INFO logger 2023-08-24 11:48:17,698 | train_utils.py:96 | Epoch 110 [Train]: loss 0.04207874403800815, mse: 0.040737465023994446, rmse: 0.2018352422744711, mae 0.12449421733617783, r2: 0.9592410243881823, nrmse: -159.58737800476698\n",
      "INFO logger 2023-08-24 11:48:17,698 | train_utils.py:98 | Epoch 110 [Test]: loss 0.0003728261402650186, mse: 0.04699184000492096, rmse: 0.2167760134445713, mae 0.1532135307788849, r2: 0.9492187116138333, nrmse: -8.018020109802452\n",
      "INFO logger 2023-08-24 11:48:17,699 | helpers.py:147 | Validation loss decreased (0.000379 --> 0.000373). Caching model ...\n",
      "INFO logger 2023-08-24 11:48:18,489 | train_utils.py:96 | Epoch 111 [Train]: loss 0.041706732525199186, mse: 0.040460921823978424, rmse: 0.20114900403426914, mae 0.12429182976484299, r2: 0.9595177132579289, nrmse: -159.0447821716195\n",
      "INFO logger 2023-08-24 11:48:18,489 | train_utils.py:98 | Epoch 111 [Test]: loss 0.0003770555276968326, mse: 0.047565046697854996, rmse: 0.21809412348308468, mae 0.15542398393154144, r2: 0.9485992880621303, nrmse: -8.06677381012103\n",
      "INFO logger 2023-08-24 11:48:18,490 | helpers.py:135 | EarlyStopping counter: 1 out of 50\n",
      "INFO logger 2023-08-24 11:48:19,351 | train_utils.py:96 | Epoch 112 [Train]: loss 0.0415702810860239, mse: 0.04069036617875099, rmse: 0.20171853206572515, mae 0.12484512478113174, r2: 0.9592881483509111, nrmse: -159.49509741000924\n",
      "INFO logger 2023-08-24 11:48:19,351 | train_utils.py:98 | Epoch 112 [Test]: loss 0.0003795646512281825, mse: 0.04785129055380821, rmse: 0.21874937840782133, mae 0.1563512682914734, r2: 0.948289957026861, nrmse: -8.091010103980768\n",
      "INFO logger 2023-08-24 11:48:19,352 | helpers.py:135 | EarlyStopping counter: 2 out of 50\n",
      "INFO logger 2023-08-24 11:48:20,147 | train_utils.py:96 | Epoch 113 [Train]: loss 0.041594006288505625, mse: 0.04000736027956009, rmse: 0.2000183998525138, mae 0.12330149114131927, r2: 0.9599715170965917, nrmse: -158.15083444031993\n",
      "INFO logger 2023-08-24 11:48:20,147 | train_utils.py:98 | Epoch 113 [Test]: loss 0.0003734679859981202, mse: 0.04712457209825516, rmse: 0.2170819478866337, mae 0.15357846021652222, r2: 0.9490752806414484, nrmse: -8.029335884411271\n",
      "INFO logger 2023-08-24 11:48:20,148 | helpers.py:135 | EarlyStopping counter: 3 out of 50\n",
      "INFO logger 2023-08-24 11:48:20,947 | train_utils.py:96 | Epoch 114 [Train]: loss 0.04117904222221114, mse: 0.040432240813970566, rmse: 0.2010776984500533, mae 0.12507747113704681, r2: 0.959546409475017, nrmse: -158.9884021703181\n",
      "INFO logger 2023-08-24 11:48:20,947 | train_utils.py:98 | Epoch 114 [Test]: loss 0.0003851097164272565, mse: 0.048521243035793304, rmse: 0.22027538000374283, mae 0.15722548961639404, r2: 0.9475659777271196, nrmse: -8.147453209881956\n",
      "INFO logger 2023-08-24 11:48:20,948 | helpers.py:135 | EarlyStopping counter: 4 out of 50\n",
      "INFO logger 2023-08-24 11:48:21,730 | train_utils.py:96 | Epoch 115 [Train]: loss 0.041225702327210456, mse: 0.04042826592922211, rmse: 0.20106781425484813, mae 0.12499213218688965, r2: 0.9595503862549174, nrmse: -158.98058692071794\n",
      "INFO logger 2023-08-24 11:48:21,731 | train_utils.py:98 | Epoch 115 [Test]: loss 0.0003820589868820202, mse: 0.048209574073553085, rmse: 0.21956678727337858, mae 0.15670928359031677, r2: 0.9479027794108524, nrmse: -8.121244079676813\n",
      "INFO logger 2023-08-24 11:48:21,732 | helpers.py:135 | EarlyStopping counter: 5 out of 50\n",
      "INFO logger 2023-08-24 11:48:22,517 | train_utils.py:96 | Epoch 116 [Train]: loss 0.040648320340551436, mse: 0.03970121592283249, rmse: 0.19925163969923182, mae 0.12337642908096313, r2: 0.9602778179346043, nrmse: -157.54457142578454\n",
      "INFO logger 2023-08-24 11:48:22,518 | train_utils.py:98 | Epoch 116 [Test]: loss 0.00038081933880409997, mse: 0.04801512509584427, rmse: 0.21912353843401733, mae 0.15586601197719574, r2: 0.9481129091806024, nrmse: -8.104849377831474\n",
      "INFO logger 2023-08-24 11:48:22,518 | helpers.py:135 | EarlyStopping counter: 6 out of 50\n",
      "INFO logger 2023-08-24 11:48:23,303 | train_utils.py:96 | Epoch 117 [Train]: loss 0.04088765672349837, mse: 0.038985345512628555, rmse: 0.19744707015458232, mae 0.122281014919281, r2: 0.960994067553624, nrmse: -156.11773179751862\n",
      "INFO logger 2023-08-24 11:48:23,303 | train_utils.py:98 | Epoch 117 [Test]: loss 0.0003755826481253083, mse: 0.04721473902463913, rmse: 0.21728952810625535, mae 0.1549980640411377, r2: 0.9489778428442717, nrmse: -8.037013774362638\n",
      "INFO logger 2023-08-24 11:48:23,304 | helpers.py:135 | EarlyStopping counter: 7 out of 50\n",
      "INFO logger 2023-08-24 11:48:24,090 | train_utils.py:96 | Epoch 118 [Train]: loss 0.03964076787815429, mse: 0.0399446003139019, rmse: 0.19986145279643572, mae 0.12385204434394836, r2: 0.960034306333398, nrmse: -158.0267393175712\n",
      "INFO logger 2023-08-24 11:48:24,090 | train_utils.py:98 | Epoch 118 [Test]: loss 0.00038428677147940587, mse: 0.04848436638712883, rmse: 0.22019165830505213, mae 0.15600956976413727, r2: 0.9476058305168229, nrmse: -8.144356546865314\n",
      "INFO logger 2023-08-24 11:48:24,091 | helpers.py:135 | EarlyStopping counter: 8 out of 50\n",
      "INFO logger 2023-08-24 11:48:24,926 | train_utils.py:96 | Epoch 119 [Train]: loss 0.040142961210221983, mse: 0.03986477851867676, rmse: 0.19966166011199235, mae 0.12243752181529999, r2: 0.9601141741903251, nrmse: -157.8687669521134\n",
      "INFO logger 2023-08-24 11:48:24,927 | train_utils.py:98 | Epoch 119 [Test]: loss 0.000382268762117938, mse: 0.048131030052900314, rmse: 0.21938785302039926, mae 0.15636955201625824, r2: 0.9479876609338038, nrmse: -8.1146257347044\n",
      "INFO logger 2023-08-24 11:48:24,927 | helpers.py:135 | EarlyStopping counter: 9 out of 50\n",
      "INFO logger 2023-08-24 11:48:25,753 | train_utils.py:96 | Epoch 120 [Train]: loss 0.04072592337615788, mse: 0.039118632674217224, rmse: 0.19778430846307607, mae 0.12260523438453674, r2: 0.9608607126312891, nrmse: -156.38437986556062\n",
      "INFO logger 2023-08-24 11:48:25,753 | train_utils.py:98 | Epoch 120 [Test]: loss 0.00038655055632368167, mse: 0.048724256455898285, rmse: 0.22073571631228664, mae 0.15656816959381104, r2: 0.9473465947453718, nrmse: -8.164479935858353\n",
      "INFO logger 2023-08-24 11:48:25,754 | helpers.py:135 | EarlyStopping counter: 10 out of 50\n",
      "INFO logger 2023-08-24 11:48:26,574 | train_utils.py:96 | Epoch 121 [Train]: loss 0.04002011400007177, mse: 0.03952698037028313, rmse: 0.1988139340445813, mae 0.12249507755041122, r2: 0.9604521455373289, nrmse: -157.19848569280563\n",
      "INFO logger 2023-08-24 11:48:26,574 | train_utils.py:98 | Epoch 121 [Test]: loss 0.0003777976164169479, mse: 0.04759341850876808, rmse: 0.21815915866350438, mae 0.1556890457868576, r2: 0.9485686254255716, nrmse: -8.06917930405075\n",
      "INFO logger 2023-08-24 11:48:26,575 | helpers.py:135 | EarlyStopping counter: 11 out of 50\n",
      "INFO logger 2023-08-24 11:48:27,417 | train_utils.py:96 | Epoch 122 [Train]: loss 0.04018553784408141, mse: 0.0392901636660099, rmse: 0.19821746559274211, mae 0.12323661893606186, r2: 0.9606890871193356, nrmse: -156.7268691643\n",
      "INFO logger 2023-08-24 11:48:27,418 | train_utils.py:98 | Epoch 122 [Test]: loss 0.0003822528326894805, mse: 0.04819364473223686, rmse: 0.2195305097981528, mae 0.15744449198246002, r2: 0.9479199975911932, nrmse: -8.119902263664649\n",
      "INFO logger 2023-08-24 11:48:27,419 | helpers.py:135 | EarlyStopping counter: 12 out of 50\n",
      "INFO logger 2023-08-24 11:48:28,285 | train_utils.py:96 | Epoch 123 [Train]: loss 0.04008209290623199, mse: 0.039449386298656464, rmse: 0.19861869574301524, mae 0.1237279549241066, r2: 0.9605297832563606, nrmse: -157.04411439331443\n",
      "INFO logger 2023-08-24 11:48:28,285 | train_utils.py:98 | Epoch 123 [Test]: loss 0.00037315649944439267, mse: 0.0469791442155838, rmse: 0.2167467282696184, mae 0.15499523282051086, r2: 0.9492324345104567, nrmse: -8.01693692205506\n",
      "INFO logger 2023-08-24 11:48:28,286 | helpers.py:135 | EarlyStopping counter: 13 out of 50\n",
      "INFO logger 2023-08-24 11:48:29,088 | train_utils.py:96 | Epoch 124 [Train]: loss 0.03980295941437362, mse: 0.03869778662919998, rmse: 0.1967175300505778, mae 0.1223672553896904, r2: 0.9612817758810704, nrmse: -155.54089798477403\n",
      "INFO logger 2023-08-24 11:48:29,089 | train_utils.py:98 | Epoch 124 [Test]: loss 0.0003849526682095221, mse: 0.04851920157670975, rmse: 0.22027074607561883, mae 0.15597191452980042, r2: 0.9475681848374208, nrmse: -8.147281812095388\n",
      "INFO logger 2023-08-24 11:48:29,089 | helpers.py:135 | EarlyStopping counter: 14 out of 50\n",
      "INFO logger 2023-08-24 11:48:29,917 | train_utils.py:96 | Epoch 125 [Train]: loss 0.03910890067345463, mse: 0.038617342710494995, rmse: 0.19651295812361838, mae 0.1219261959195137, r2: 0.9613622664143652, nrmse: -155.37914676100885\n",
      "INFO logger 2023-08-24 11:48:29,917 | train_utils.py:98 | Epoch 125 [Test]: loss 0.0003761582773679878, mse: 0.047434963285923004, rmse: 0.21779569161469425, mae 0.15552785992622375, r2: 0.9487398581060372, nrmse: -8.055735537555083\n",
      "INFO logger 2023-08-24 11:48:29,918 | helpers.py:135 | EarlyStopping counter: 15 out of 50\n",
      "INFO logger 2023-08-24 11:48:30,707 | train_utils.py:96 | Epoch 126 [Train]: loss 0.03900966679066187, mse: 0.038784001022577286, rmse: 0.19693654059766888, mae 0.12123079597949982, r2: 0.9611955208034841, nrmse: -155.71406555735348\n",
      "INFO logger 2023-08-24 11:48:30,708 | train_utils.py:98 | Epoch 126 [Test]: loss 0.00039103130281668657, mse: 0.0492706261575222, rmse: 0.22196987668943324, mae 0.15654608607292175, r2: 0.946756171192479, nrmse: -8.210128541372603\n",
      "INFO logger 2023-08-24 11:48:30,708 | helpers.py:135 | EarlyStopping counter: 16 out of 50\n",
      "INFO logger 2023-08-24 11:48:31,472 | train_utils.py:96 | Epoch 127 [Train]: loss 0.038832274709420744, mse: 0.03882041573524475, rmse: 0.1970289718169507, mae 0.12183699011802673, r2: 0.9611590805553244, nrmse: -155.7871492060005\n",
      "INFO logger 2023-08-24 11:48:31,473 | train_utils.py:98 | Epoch 127 [Test]: loss 0.0003847277804947736, mse: 0.048486221581697464, rmse: 0.22019587094606807, mae 0.15706852078437805, r2: 0.9476038298228675, nrmse: -8.14451236226133\n",
      "INFO logger 2023-08-24 11:48:31,473 | helpers.py:135 | EarlyStopping counter: 17 out of 50\n",
      "INFO logger 2023-08-24 11:48:32,431 | train_utils.py:96 | Epoch 128 [Train]: loss 0.038774068794737104, mse: 0.03804134204983711, rmse: 0.19504189819071469, mae 0.1202809289097786, r2: 0.9619385703849151, nrmse: -154.21600698950792\n",
      "INFO logger 2023-08-24 11:48:32,432 | train_utils.py:98 | Epoch 128 [Test]: loss 0.00037707692197366067, mse: 0.04759592562913895, rmse: 0.21816490466878247, mae 0.15390318632125854, r2: 0.9485659170653317, nrmse: -8.069391834879868\n",
      "INFO logger 2023-08-24 11:48:32,432 | helpers.py:135 | EarlyStopping counter: 18 out of 50\n",
      "INFO logger 2023-08-24 11:48:33,307 | train_utils.py:96 | Epoch 129 [Train]: loss 0.03909598357131472, mse: 0.03794822469353676, rmse: 0.1948030407707661, mae 0.11998286843299866, r2: 0.9620317385117844, nrmse: -154.0271468631146\n",
      "INFO logger 2023-08-24 11:48:33,307 | train_utils.py:98 | Epoch 129 [Test]: loss 0.0003800197640619083, mse: 0.04791124165058136, rmse: 0.2188863669820059, mae 0.1553279459476471, r2: 0.9482251766682708, nrmse: -8.096076979808826\n",
      "INFO logger 2023-08-24 11:48:33,308 | helpers.py:135 | EarlyStopping counter: 19 out of 50\n",
      "INFO logger 2023-08-24 11:48:34,306 | train_utils.py:96 | Epoch 130 [Train]: loss 0.03835868437454337, mse: 0.03790846839547157, rmse: 0.19470097173735823, mae 0.12020977586507797, r2: 0.9620715120893254, nrmse: -153.94644277381147\n",
      "INFO logger 2023-08-24 11:48:34,307 | train_utils.py:98 | Epoch 130 [Test]: loss 0.0003781759534140079, mse: 0.04770325869321823, rmse: 0.21841075681664177, mae 0.15429210662841797, r2: 0.9484499303969577, nrmse: -8.07848531083346\n",
      "INFO logger 2023-08-24 11:48:34,308 | helpers.py:135 | EarlyStopping counter: 20 out of 50\n",
      "INFO logger 2023-08-24 11:48:35,258 | train_utils.py:96 | Epoch 131 [Train]: loss 0.03836951477569528, mse: 0.038155749440193176, rmse: 0.19533496727466174, mae 0.12158427387475967, r2: 0.9618241026544434, nrmse: -154.44773127191922\n",
      "INFO logger 2023-08-24 11:48:35,259 | train_utils.py:98 | Epoch 131 [Test]: loss 0.00038790851831436155, mse: 0.0489657036960125, rmse: 0.22128195519746408, mae 0.15689638257026672, r2: 0.9470856785828049, nrmse: -8.184683990248478\n",
      "INFO logger 2023-08-24 11:48:35,260 | helpers.py:135 | EarlyStopping counter: 21 out of 50\n",
      "INFO logger 2023-08-24 11:48:36,165 | train_utils.py:96 | Epoch 132 [Train]: loss 0.038982131009106524, mse: 0.03831261768937111, rmse: 0.19573609194364516, mae 0.12176044285297394, r2: 0.9616671533504959, nrmse: -154.76489309883667\n",
      "INFO logger 2023-08-24 11:48:36,166 | train_utils.py:98 | Epoch 132 [Test]: loss 0.00038075381944402615, mse: 0.047939833253622055, rmse: 0.21895166876190292, mae 0.15533651411533356, r2: 0.9481942805147325, nrmse: -8.098492334608006\n",
      "INFO logger 2023-08-24 11:48:36,166 | helpers.py:135 | EarlyStopping counter: 22 out of 50\n",
      "INFO logger 2023-08-24 11:48:37,164 | train_utils.py:96 | Epoch 133 [Train]: loss 0.038183749638847075, mse: 0.0379478894174099, rmse: 0.19480218021729093, mae 0.12040039896965027, r2: 0.9620320721826007, nrmse: -154.02646643946218\n",
      "INFO logger 2023-08-24 11:48:37,165 | train_utils.py:98 | Epoch 133 [Test]: loss 0.00037272602690067904, mse: 0.04702591523528099, rmse: 0.21685459468335225, mae 0.15297769010066986, r2: 0.9491818968430464, nrmse: -8.020926639647644\n",
      "INFO logger 2023-08-24 11:48:37,166 | helpers.py:147 | Validation loss decreased (0.000373 --> 0.000373). Caching model ...\n",
      "INFO logger 2023-08-24 11:48:38,056 | train_utils.py:96 | Epoch 134 [Train]: loss 0.03847805966506712, mse: 0.03860386461019516, rmse: 0.19647866197171426, mae 0.12232646346092224, r2: 0.9613757527746437, nrmse: -155.35202943057462\n",
      "INFO logger 2023-08-24 11:48:38,057 | train_utils.py:98 | Epoch 134 [Test]: loss 0.00038042008353952775, mse: 0.048046983778476715, rmse: 0.21919622208988163, mae 0.15533186495304108, r2: 0.9480784866263113, nrmse: -8.107537770357537\n",
      "INFO logger 2023-08-24 11:48:38,057 | helpers.py:135 | EarlyStopping counter: 1 out of 50\n",
      "INFO logger 2023-08-24 11:48:38,869 | train_utils.py:96 | Epoch 135 [Train]: loss 0.037971751182340086, mse: 0.038936957716941833, rmse: 0.19732449852195705, mae 0.12210994213819504, r2: 0.9610424822753201, nrmse: -156.0208166837456\n",
      "INFO logger 2023-08-24 11:48:38,869 | train_utils.py:98 | Epoch 135 [Test]: loss 0.0003726167702361157, mse: 0.046965088695287704, rmse: 0.21671430200909148, mae 0.15365982055664062, r2: 0.9492476241215831, nrmse: -8.01573755315414\n",
      "INFO logger 2023-08-24 11:48:38,870 | helpers.py:147 | Validation loss decreased (0.000373 --> 0.000373). Caching model ...\n",
      "INFO logger 2023-08-24 11:48:39,681 | train_utils.py:96 | Epoch 136 [Train]: loss 0.03914725883805659, mse: 0.03792897239327431, rmse: 0.1947536197180281, mae 0.12144989520311356, r2: 0.9620510017717968, nrmse: -153.98807055445903\n",
      "INFO logger 2023-08-24 11:48:39,681 | train_utils.py:98 | Epoch 136 [Test]: loss 0.00037857952652967466, mse: 0.04768356680870056, rmse: 0.21836567223055128, mae 0.15601570904254913, r2: 0.9484712064673453, nrmse: -8.07681774110482\n",
      "INFO logger 2023-08-24 11:48:39,682 | helpers.py:135 | EarlyStopping counter: 1 out of 50\n",
      "INFO logger 2023-08-24 11:48:40,614 | train_utils.py:96 | Epoch 137 [Train]: loss 0.03817962445464218, mse: 0.038378193974494934, rmse: 0.19590353231755403, mae 0.12218284606933594, r2: 0.9616015408314884, nrmse: -154.89728509313426\n",
      "INFO logger 2023-08-24 11:48:40,615 | train_utils.py:98 | Epoch 137 [Test]: loss 0.00038116243041572514, mse: 0.04816902428865433, rmse: 0.21947442741388876, mae 0.1570381224155426, r2: 0.9479466006520937, nrmse: -8.11782791199774\n",
      "INFO logger 2023-08-24 11:48:40,615 | helpers.py:135 | EarlyStopping counter: 2 out of 50\n",
      "INFO logger 2023-08-24 11:48:41,356 | train_utils.py:96 | Epoch 138 [Train]: loss 0.03804059176763985, mse: 0.0375339537858963, rmse: 0.19373681577309024, mae 0.12149352580308914, r2: 0.962446227636914, nrmse: -153.18410255715125\n",
      "INFO logger 2023-08-24 11:48:41,356 | train_utils.py:98 | Epoch 138 [Test]: loss 0.00038677197799347993, mse: 0.048773232847452164, rmse: 0.22084662743055908, mae 0.15694619715213776, r2: 0.9472936696086915, nrmse: -8.16858226970322\n",
      "INFO logger 2023-08-24 11:48:41,357 | helpers.py:135 | EarlyStopping counter: 3 out of 50\n",
      "INFO logger 2023-08-24 11:48:42,152 | train_utils.py:96 | Epoch 139 [Train]: loss 0.037759115897642914, mse: 0.0382225476205349, rmse: 0.195505876179042, mae 0.12116935104131699, r2: 0.9617572683247383, nrmse: -154.58286576885027\n",
      "INFO logger 2023-08-24 11:48:42,153 | train_utils.py:98 | Epoch 139 [Test]: loss 0.00037840254583031114, mse: 0.04776308685541153, rmse: 0.21854767638987044, mae 0.1543424129486084, r2: 0.9483852770719743, nrmse: -8.083549634483152\n",
      "INFO logger 2023-08-24 11:48:42,153 | helpers.py:135 | EarlyStopping counter: 4 out of 50\n",
      "INFO logger 2023-08-24 11:48:42,978 | train_utils.py:96 | Epoch 140 [Train]: loss 0.0377898871229263, mse: 0.03755556792020798, rmse: 0.19379258995175222, mae 0.119864821434021, r2: 0.9624245985978783, nrmse: -153.22820216449796\n",
      "INFO logger 2023-08-24 11:48:42,979 | train_utils.py:98 | Epoch 140 [Test]: loss 0.0003722011062659715, mse: 0.04704621061682701, rmse: 0.2169013845433611, mae 0.1519538015127182, r2: 0.9491599584452592, nrmse: -8.022657283331531\n",
      "INFO logger 2023-08-24 11:48:42,979 | helpers.py:147 | Validation loss decreased (0.000373 --> 0.000372). Caching model ...\n",
      "INFO logger 2023-08-24 11:48:43,949 | train_utils.py:96 | Epoch 141 [Train]: loss 0.03799488450749777, mse: 0.03743985295295715, rmse: 0.19349380598085603, mae 0.1189483031630516, r2: 0.9625403772607707, nrmse: -152.99195922710086\n",
      "INFO logger 2023-08-24 11:48:43,949 | train_utils.py:98 | Epoch 141 [Test]: loss 0.0003675805775132793, mse: 0.04632432013750076, rmse: 0.21523085312636003, mae 0.15140312910079956, r2: 0.9499400698169557, nrmse: -7.960868369130485\n",
      "INFO logger 2023-08-24 11:48:43,950 | helpers.py:147 | Validation loss decreased (0.000372 --> 0.000368). Caching model ...\n",
      "INFO logger 2023-08-24 11:48:44,773 | train_utils.py:96 | Epoch 142 [Train]: loss 0.0377119843615219, mse: 0.03721942380070686, rmse: 0.19292336250622127, mae 0.12095104902982712, r2: 0.9627609201223228, nrmse: -152.54092016478938\n",
      "INFO logger 2023-08-24 11:48:44,773 | train_utils.py:98 | Epoch 142 [Test]: loss 0.00037114347347564866, mse: 0.04679054021835327, rmse: 0.21631121149481197, mae 0.15470609068870544, r2: 0.9494362447832858, nrmse: -8.000828210564952\n",
      "INFO logger 2023-08-24 11:48:44,774 | helpers.py:135 | EarlyStopping counter: 1 out of 50\n",
      "INFO logger 2023-08-24 11:48:45,601 | train_utils.py:96 | Epoch 143 [Train]: loss 0.03787539096083492, mse: 0.03746172413229942, rmse: 0.1935503142139, mae 0.1217065379023552, r2: 0.9625184945382964, nrmse: -153.03663923761613\n",
      "INFO logger 2023-08-24 11:48:45,601 | train_utils.py:98 | Epoch 143 [Test]: loss 0.00038036975617471494, mse: 0.047957856208086014, rmse: 0.21899282227526548, mae 0.15542283654212952, r2: 0.9481747965914227, nrmse: -8.100014503470172\n",
      "INFO logger 2023-08-24 11:48:45,602 | helpers.py:135 | EarlyStopping counter: 2 out of 50\n",
      "INFO logger 2023-08-24 11:48:46,654 | train_utils.py:96 | Epoch 144 [Train]: loss 0.037819451186805964, mse: 0.03742546960711479, rmse: 0.19345663495242232, mae 0.12078613042831421, r2: 0.962554766758904, nrmse: -152.96256878518088\n",
      "INFO logger 2023-08-24 11:48:46,654 | train_utils.py:98 | Epoch 144 [Test]: loss 0.00036340996361615364, mse: 0.0457252636551857, rmse: 0.21383466429740922, mae 0.15319378674030304, r2: 0.9505874337662388, nrmse: -7.909226723314945\n",
      "INFO logger 2023-08-24 11:48:46,655 | helpers.py:147 | Validation loss decreased (0.000368 --> 0.000363). Caching model ...\n",
      "INFO logger 2023-08-24 11:48:47,549 | train_utils.py:96 | Epoch 145 [Train]: loss 0.03779792693967465, mse: 0.03704993426799774, rmse: 0.19248359480225255, mae 0.1208360493183136, r2: 0.9629305025751118, nrmse: -152.19320400770664\n",
      "INFO logger 2023-08-24 11:48:47,550 | train_utils.py:98 | Epoch 145 [Test]: loss 0.0003673621110225979, mse: 0.04641568288207054, rmse: 0.21544299218603175, mae 0.15394076704978943, r2: 0.9498413338771398, nrmse: -7.968714879542289\n",
      "INFO logger 2023-08-24 11:48:47,550 | helpers.py:135 | EarlyStopping counter: 1 out of 50\n",
      "INFO logger 2023-08-24 11:48:48,534 | train_utils.py:96 | Epoch 146 [Train]: loss 0.03664254629984498, mse: 0.037299882620573044, rmse: 0.19313177527422318, mae 0.12049680948257446, r2: 0.9626804196194936, nrmse: -152.70570827024278\n",
      "INFO logger 2023-08-24 11:48:48,535 | train_utils.py:98 | Epoch 146 [Test]: loss 0.0003755124470992395, mse: 0.047198254615068436, rmse: 0.21725159289420282, mae 0.15454228222370148, r2: 0.948995654103553, nrmse: -8.035610642677202\n",
      "INFO logger 2023-08-24 11:48:48,536 | helpers.py:135 | EarlyStopping counter: 2 out of 50\n",
      "INFO logger 2023-08-24 11:48:49,381 | train_utils.py:96 | Epoch 147 [Train]: loss 0.03755004233244108, mse: 0.03710930794477463, rmse: 0.19263776354799864, mae 0.12038532644510269, r2: 0.9628710968526164, nrmse: -152.31510237206882\n",
      "INFO logger 2023-08-24 11:48:49,381 | train_utils.py:98 | Epoch 147 [Test]: loss 0.00037436950189328333, mse: 0.047121562063694, rmse: 0.21707501483057418, mae 0.15373435616493225, r2: 0.9490785323365925, nrmse: -8.02907944744657\n",
      "INFO logger 2023-08-24 11:48:49,382 | helpers.py:135 | EarlyStopping counter: 3 out of 50\n",
      "INFO logger 2023-08-24 11:48:50,166 | train_utils.py:96 | Epoch 148 [Train]: loss 0.036723762008477934, mse: 0.036542635411024094, rmse: 0.19116128115030012, mae 0.12002009153366089, r2: 0.9634380667565896, nrmse: -151.14767515834924\n",
      "INFO logger 2023-08-24 11:48:50,167 | train_utils.py:98 | Epoch 148 [Test]: loss 0.00037328513960043587, mse: 0.04705343768000603, rmse: 0.2169180436939399, mae 0.1545717865228653, r2: 0.9491521514994002, nrmse: -8.023273464994025\n",
      "INFO logger 2023-08-24 11:48:50,168 | helpers.py:135 | EarlyStopping counter: 4 out of 50\n",
      "INFO logger 2023-08-24 11:48:50,967 | train_utils.py:96 | Epoch 149 [Train]: loss 0.03706118272384629, mse: 0.037557438015937805, rmse: 0.1937974148845588, mae 0.12169083952903748, r2: 0.9624227274946302, nrmse: -153.23201714926958\n",
      "INFO logger 2023-08-24 11:48:50,968 | train_utils.py:98 | Epoch 149 [Test]: loss 0.00036834365094614305, mse: 0.046443186700344086, rmse: 0.21550681358217907, mae 0.15383614599704742, r2: 0.9498116127419789, nrmse: -7.971075478529298\n",
      "INFO logger 2023-08-24 11:48:50,968 | helpers.py:135 | EarlyStopping counter: 5 out of 50\n",
      "INFO logger 2023-08-24 11:48:51,745 | train_utils.py:96 | Epoch 150 [Train]: loss 0.0372798570751911, mse: 0.03709867224097252, rmse: 0.1926101561210429, mae 0.12225654721260071, r2: 0.9628817368823254, nrmse: -152.29327369224225\n",
      "INFO logger 2023-08-24 11:48:51,746 | train_utils.py:98 | Epoch 150 [Test]: loss 0.00037126336246728897, mse: 0.0468159094452858, rmse: 0.2163698441217856, mae 0.15505196154117584, r2: 0.9494088294774273, nrmse: -8.002996889537753\n",
      "INFO logger 2023-08-24 11:48:51,747 | helpers.py:135 | EarlyStopping counter: 6 out of 50\n"
     ]
    },
    {
     "data": {
      "text/plain": "<Figure size 640x480 with 1 Axes>",
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAiEAAAGbCAYAAAASrkAJAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAABU+klEQVR4nO3deXxU9b3/8deZmUwykz1kA4nsiwJKJICK1AWXVtyquNxa61KtNlaLWqtWaystXr32py2i1qttsdelrQi41KXupYiAGiQu7BACYUtC9mUyM+f3x8kMxLANJDlJzvv5eMwD5szJzPczE5033+0YpmmaiIiIiHQxl90NEBEREWdSCBERERFbKISIiIiILRRCRERExBYKISIiImILhRARERGxhUKIiIiI2EIhRERERGzhsbsB+xIOhwkGg7hcLgzDsLs5IiIichBM0yQcDuPxeHC59t/X0W1DSDAYpLi42O5miIiIyCEYM2YMXq93v+d02xASSU9jxozB7XZ36HOHQiGKi4s75bm7I6fVC86r2Wn1gvNqdlq94Lyae0u9kToO1AsC3TiERIZg3G53p30Ynfnc3ZHT6gXn1ey0esF5NTutXnBezb2l3oOZSqGJqSIiImILhRARERGxhUKIiIiI2CLmOSEVFRX88pe/ZOnSpbjdbs477zzuuOMOPJ72T7V06VIeeugh1q5dS0pKCt/73ve4/vrrO6ThIiLSM5mmSTAYJBQK7fe8yONNTU29Yo7EgfSUet1uNx6Pp0O2z4g5hEyfPp2cnBwWLlxIeXk5P/7xj5kzZw7XXnttm/PWrVvHj370I371q19xwQUXsGrVKq688koGDBjAt7/97cNuuIiI9DyBQICtW7fS0NBwwHNN08Tj8VBSUuKI/aJ6Ur1+v5++ffsecAnugcQUQkpKSli6dCn//ve/8fl85OXlUVhYyEMPPdQuhDz//PNMmTKF7373uwCMHDmSv/3tbyQlJR1Wg0VEpGcKh8Ns2LABt9tNv3798Hq9+/2yNU2TxsZGfD5ft/9S7gg9oV7TNAkEAuzcuZMNGzYwbNiwg1qKuy8xhZA1a9aQlpZGTk5O9NiQIUMoKyujpqaGlJSU6PEVK1Zw4okncuutt7Jo0SIyMjK46qqruPTSS2Nq4IG66w5F5Dk747m7I6fVC86r2Wn1gvNq7g31Njc3EwqF6N+/P36//4DnR3bejI+P77Zfyh2pp9SbkJAQ7bFpamoiPj6+zeOx/I7GFELq6+vx+XxtjkXuNzQ0tAkh1dXV/PWvf+WRRx7hf/7nfygqKuL6668nNTU1puGYztw11Wk7sjqtXnBezU6rF5xXc0+v1+Px0NzcHNPPNDY2dlJruqeeUG9zczMtLS18/fXXh/U8MYUQv9/f7s2J3E9MTGxz3Ov1MmXKFE455RQAxo8fz/nnn88bb7wRUwjRjqmHz2n1gvNqdlq94Lyae0O9TU1NlJSU4PP5SEhIOOD5PWF4oiP1pHpdLhdxcXEMHTq03WcZ+V09GDGFkGHDhlFVVUV5eTmZmZmANQE1NzeX5OTkNucOGTKEQCDQrmGmacbyktoxtQM5rV5wXs1OqxecV3NPrtftdmMYRvR2sGI9v6frCfVG2ni4v48xzSYZOHAg48aN4/7776euro7S0lIef/xxpk2b1u7cyy67jHfffZeXX34Z0zRZtmwZr776Kueff/4hN1ZERKQr3XvvveTn55Ofn8+YMWMYOXJk9H5+fj6ffPJJzM957bXX8sc//vGQ2nPaaacxYsQIFi5c2O6xf/3rX4wYMYI777wzeuzTTz/l6quvpqCggPz8fM455xz+/Oc/t+kQGDFiBMccc0ybuiK3srKyQ2rnwYp5ie6sWbOYMWMGU6ZMweVyccEFF1BYWAhAfn4+9913H+eddx4nnHACjz/+OLNmzeK+++4jIyODO+64gylTpnR4ESIiIp1hxowZzJgxA4B58+Yxe/Zs3nvv
vcN6zqeffvqwfj49PZ358+czefLkNsdfeumlNitQS0tLufrqq5kxYwZPPvkkHo+HFStWcNNNN9HU1BT97gZ46qmnmDhx4mG161DEHEIyMzOZNWvWXh8rKipqc//kk0/m5JNPPrSWdZKGQJC/frSRIwgy1u7GiIiINReipf2KCtM0aQiEwBPs0OEJX5y7w55v8+bNTJkyhauvvpqXXnqJc845h7vuuotHHnmEDz74gG3btpGQkMDZZ5/NPffcg2EYXHHFFUyYMIGbbrqJO++8E6/Xy44dO1iyZAnp6elceeWVXHnllft8zXPPPZe///3v1NbWRqdC7Nixg+XLl7cJJsXFxcTFxXH22WdH9/MYO3Ysd911F5s3b+6Q+g9Xt72Kbmd5+6vtPPDmKr51ZAJnTz7w+SIi0nlM02TaHxfzacmuLnvNggHpvHjDCR0abOrr61m0aBFNTU0888wzLFy4kGeeeYbs7GyKior4/ve/z+mnn84JJ5zQ7mfnzZvHk08+yaOPPsrzzz/Pgw8+yLe//e0222Hs6aijjmLQoEG8/vrr0W0vFixYwHe+8x2ampqi502cOBGfz8dFF13E2WefzdixYxkzZgxnn312h9V9uBx37ZjGgJW2G1pimyArIiKdo3tPwTw4F1xwAV6vl5SUFC655BLmzJlDVlYWO3bsoKmpicTERLZv377Xn504cSKTJk3C4/Fw/vnnEwqF2LRp035f78ILL2T+/PnR+y+99BIXXXRRm3P69OnDyy+/zJQpU3j77bf54Q9/yMSJE7nuuuvYuHFjm3NvuOEGCgoK2ty64jIrjusJcbmsX/ewMoiIiO0Mw+DFG07Y93BMQyN+f8cuWe3I4ZiI7Ozs6N8bGxuZMWMGy5YtIzc3l6OPPjq6EdneZGVlRf8eFxcHsM9zI84991weeughNmzYQEVFBfHx8YwZM6bdeX369GH69OlMnz6dxsZGioqKmD17Ntdccw1vv/12dGXLH//4x54xJ6SncxuREKIUIiLSHRiGgd/b/uvINE0IuvF7O+ZiaZ1pz/bdc889pKam8p///If4+HjC4TDjx4/v0NfLyMjglFNOYcGCBezYsWOvq1Rvv/12AoEAf/jDHwBrc9ETTzyRPn36cN5551FdXU1GRkaHtitWjhuOcasnREREOlFdXR3x8fG4XC7q6ur4n//5H+rq6mhpaenQ17nwwgt59dVXef/99zn33HPbPX7OOefw7rvv8te//pXt27djmiZlZWU89dRTjB8/3vYAAg7sCXEZYSa5imkJj7S7KSIi0gvdc8893HvvvUyYMIHExEROOeUUJk+ezOrVqzv0db71rW/R3NzM8ccfT3p6ervHTz75ZJ588kmefvppZs+eTVNTExkZGZxxxhnce++9bc697rrr9rrp2MyZMzt1IqvjQki/bR/ynPe/eb3pDKB9chQREdmbCy+8kAsvvLDNsf79+7Nq1ao2x0aPHs28efP2+Tz/93//F/37Aw880O7xlStX7nP4ac89SjweD4sWLWrz+Defb9KkSUyaNGmfbQHatb8rOW44Jr7FWgaWYVba3BIRERFnc1wIMVxWd5PL3P/MYxEREelcDgwh1giUi/bLwURERKTrOC6E4I6EEPWEiIiI2MlxISTaE2KqJ0RERMROjgshrtaeELd6QkRERGzluBCyezhGPSEiIiJ2clwIMVzWvvxuDceIiIjYynkhJDocoxAiIiJiJ8eFEFfrPiGaEyIiInbauHGj3U2wneO2bTc0J0RERA7Svffey6uvvgpAMBikpaUFn88Xffypp56ioKAg5uf96quvuOSSS/jiiy/2+vidd97J/Pnz+dGPfsRtt93W5rGKigpOPvlksrOzo9u4b926lYcffphFixbR0NBAeno6p512Gj/96U9JSUkB4IorrqCoqIi4uLh2r3ffffdx3nnnxVzH4XJwCFFPiIiI7N+MGTOYMWMGAPPmzWP27Nltrt9yqGpraw94Vd309HRe
fvllbrnlFlyu3QMXCxYsID4+Pno/HA5zzTXXMGnSJN58801SUlIoLS3lF7/4BTfffDNz5syJnnv99ddz0003HXb7O4rzhmMic0I0MVVEpHswTQjU7+PWsJ/HDvFmmh3W9E2bNnHDDTcwceJETj31VB555BECgQAAdXV13HLLLUycOJFJkybxwx/+kHXr1lFaWsp1110HQH5+PkVFRXt97kmTJtHS0sJHH33U5vi8efOYOnVq9P6uXbtYv349U6dOjfZ65OXlcc8999CvXz9Coe77fee4nhBX6+oYj3pCRETsZ5rw57OgdEm7hwwgsTNeM+94uOZN2MeVag9WQ0MDV111FVOnTuUPf/gDlZWV3HzzzYTDYW677Tb+/Oc/U1dXx4cffojL5eLee+/ld7/7HU888QRPPfUUP/jBD/YZQADi4uI455xzmD9/PieddBIAy5cvxzRNxo4dy3/+8x8A+vTpw/HHH89PfvITzj33XAoKCjj22GMZMWIE999//2HV2NkcF0IMj1bHiIh0L4cXBuzywQcfEAgEuPXWWzEMg759+/LTn/6Um2++mdtuu42EhARWrlzJggULmDRpEvfff3+bYZWDcdFFF3HppZdSW1tLcnIyc+fO5aKLLmp33lNPPcWLL77I22+/zd/+9jcaGxsZOXIkP/vZz5g8eXL0vP/93//lmWeeaffzn3zySexvQAdwXAiJ9IQohIiIdAOGYfVKtDS0e8g0TRoaGvH7fRiH2WvRRpz/sHtBALZs2UJlZSXjx4+PHjNNk5aWFioqKrjuuuvwer3MnTuXGTNmkJeXx2233caZZ5550K8xcuRIBg8ezBtvvMG5557L22+/zeuvv86HH37Y5jyv18vll1/O5ZdfTigUYuXKlTz//PPccMMNvPrqqwwePBiAH/3oR91qTojzQohH27aLiHQrhgHevQy8mCYEDfB2TGjoaLm5uRx55JG8+eab0WN1dXVUVFSQkZHBqlWrOO2007jqqquora3l+eef55ZbbuHjjz+O6XW++93vMn/+fLxeL+PHj6dPnz5tHv/HP/7B7Nmzef/993G73bjdbkaNGsXMmTN55513WL16dTSEdDeOm5iqzcpERKQjnHrqqdTX1/P0008TCASoqanhjjvu4JZbbsEwDF588UV+/vOfU1FRQVJSEklJSfj9frxeb3R1S21t7QFf59xzz+WLL77g6aefZtq0ae0eP+WUU2hubuZXv/oVGzduJBQKUVVVxV/+8hcAJkyY0LGFdyDHhRBX61V0NTFVREQOR1JSEnPmzGHJkiV861vf4vTTT8flcvHEE08AcOuttzJgwACmTp3Kcccdx7x583j88ceJj49n+PDhjBs3jsmTJ7cbWvmm9PR0Tj31VKqrq9vM74jIzs6OzgO54ooryM/P56yzzqKoqIjnn3+ejIyM6LlPPvkk+fn57W6RZchdzTDNDlyr1IFCoRDLly9n7NixuN3uDnve8q0byXzyWFpMN657d3boc3dXnfVedmdOq9lp9YLzau4N9TY1NbFhwwYGDRpEQkLCAc+35oQ04Pf7O3ZOSDfVk+rd32cZy++qY3tC4owQZli9ISIiInZxXAhxe3ZvV9udN3ARERHp7RwXQow2ISRoY0tERESczXEhxO3evSo5HNz/vv0iIiLSeZwXQvbsCQmrJ0REpKt10/UQEoOO+gwdF0IiE1MBTPWEiIh0mcgl5Bsa2u+OKj1L5DOMfKaHynE7pu65XEhzQkREuo7b7SYtLY0dO3YAHHApqmmaNDc343K5uv2S1Y7QE+qNLCPesWMHaWlph71c3HEhxOUyaDHdxBkhwgohIiJdKjc3FyAaRPYnch2WuLi4bvul3JF6Ur1paWnRz/JwOC6EGIZBCBdxhDQxVUSki0WuNpudnU1Ly/7/Hxy5ENvQoUN77AZtsegp9cbFxXVY+xwXQgCCuIEWwmHtEyIiYofIhdb2J7KXU0JCQrf+Uu4oTqsXHDgxFSCE9eFq
YqqIiIh9HBpCrLLDWqIrIiJiG2eGEMPqCdHEVBEREfs4MoSEIz0hGo4RERGxjSNDSGROiIZjRERE7OPoEGJqOEZERMQ2Dg0hVtkKISIiIvZxZAgJt05MNbVPiIiIiG0cGUJ2D8doYqqIiIhdnBlCDE1MFRERsZsjQ0i4tScEzQkRERGxjSNDiGm0TkxVT4iIiIhtHBlCtERXRETEfjGHkIqKCgoLCykoKGDixInMnDmTYHDvX+bXXnstY8aMIT8/P3r797//fdiNPlxaHSMiImI/T6w/MH36dHJycli4cCHl5eX8+Mc/Zs6cOVx77bXtzv3iiy/405/+xIQJEzqksR0lMidEwzEiIiL2iaknpKSkhKVLl3L77bfj8/nIy8ujsLCQ5557rt25paWlVFdXc/TRR3dYYztKpCdEE1NFRETsE1MIWbNmDWlpaeTk5ESPDRkyhLKyMmpqatqcW1xcTGJiIrfccgvHH38855xzDnPnzu2YVh+msGF1AKknRERExD4xDcfU19fj8/naHIvcb2hoICUlJXo8EAgwduxYbrnlFoYNG8aSJUu46aabSExM5Dvf+c5Bv2Yo1PHzNsKR1THBlk55/u4mUqMTao1wWs1OqxecV7PT6gXn1dxb6o2l/TGFEL/fT2NjY5tjkfuJiYltjl9wwQVccMEF0fsnnXQSF1xwAW+88UZMIaS4uDiWJh6UQMgEoLJiJ8uXL+/w5++uOuO97O6cVrPT6gXn1ey0esF5NTup3phCyLBhw6iqqqK8vJzMzEwA1q1bR25uLsnJyW3OnTt3brtej0AgQHx8fEwNHDNmDG63O6afOZCP346HIKSnJjN67NgOfe7uKBQKUVxc3CnvZXfltJqdVi84r2an1QvOq7m31Bup42DEFEIGDhzIuHHjuP/++5kxYwa7du3i8ccfZ9q0ae3Oraur4+GHH2bAgAGMHDmSf//737z22mv86U9/iuUlcbvdHf5hhF3W8xmEe/QHHavOeC+7O6fV7LR6wXk1O61ecF7NTqo35iW6s2bNYsaMGUyZMgWXy8UFF1xAYWEhAPn5+dx3332cd955XHnllTQ0NPCTn/yEiooK8vLyePDBBykoKOjwImJltk5MRfuEiIiI2CbmEJKZmcmsWbP2+lhRUVH074ZhUFhYGA0o3YmW6IqIiNjPkdu2m5EQYqonRERExC6ODCHh6HCMekJERETs4sgQErmKrkKIiIiIfZwZQlzqCREREbGbI0MIrXNCDM0JERERsY0jQ0h0dYyW6IqIiNjGkSGE1uEYQ8MxIiIitnFkCNESXREREfs5M4REe0IUQkREROziyBBCdE6IhmNERETs4sgQYrq0OkZERMRujgwh0YmppnpCRERE7OLIEBKZmKo5ISIiIvZxZAiJ9IS4NBwjIiJiG4eGkMicEA3HiIiI2MWZISRyFV0zbG87REREHMyZIcRt9YS41BMiIiJiG0eGENOIrI7RnBARERG7ODKEGJGJqVodIyIiYhtHhhDc6gkRERGxmzNDiBGZE6IQIiIiYhdHhhCjtSfEhSamioiI2MWRIUQ9ISIiIvZzZgiJzgnRPiEiIiJ2cWQIMbRtu4iIiO0cGUIiPSEKISIiIvZxZAgxjMjEVIUQERERuzgzhKgnRERExHaODiFu9YSIiIjYxpkhxKUluiIiInZzZghRT4iIiIjtnBlCXNonRERExG7ODCHuOEA9ISIiInZyZAhxtQ7HeAiBadrcGhEREWdyZAiJDMcAoCEZERERWzgyhLg8cbvvhHUlXRERETs4MoQYbvfuOwohIiIitnBkCHG5FEJERETs5swQ4t5zOEYrZEREROzgzBDSpidEIURERMQOjgwhbreLFrM1iGg4RkRExBaODCEuwyAUKV0hRERExBaODCFul0EQ9YSIiIjYybEhZHdPiOaEiIiI2MGRIcRloOEYERERmzkyhFg9IRqOERERsZMjQ4jL0JwQERERuzkyhLSZE2JqToiIiIgd
nBlCDINgdJ8QhRARERE7ODKEuFzaJ0RERMRujgwhbgPNCREREbFZzCGkoqKCwsJCCgoKmDhxIjNnziQY3P8X+erVqzn22GNZsmTJITe0I7n2WB1jhhRCRERE7BBzCJk+fTp+v5+FCxcyd+5cFi9ezJw5c/Z5fmNjI7fddhtNTU2H084OZU1MNQAIK4SIiIjYIqYQUlJSwtKlS7n99tvx+Xzk5eVRWFjIc889t8+fue+++zj99NMPu6EdyW3s7gkJh1psbo2IiIgzeWI5ec2aNaSlpZGTkxM9NmTIEMrKyqipqSElJaXN+QsWLKCkpISZM2fy+OOPH1IDQ6GOX71imuHonJBQSwBXJ7xGdxJ5DzvjveyunFaz0+oF59XstHrBeTX3lnpjaX9MIaS+vh6fz9fmWOR+Q0NDmxCybt06HnnkEV544QXcbncsL9NGcXHxIf/svjQHTdytnUAbNqynIbi8w1+jO+qM97K7c1rNTqsXnFez0+oF59XspHpjCiF+v5/GxsY2xyL3ExMTo8eam5u55ZZb+MUvfkG/fv0Oq4Fjxow5rBCzN43NLXz+mvWcR/bvR8LYsR36/N1NKBSiuLi4U97L7sppNTutXnBezU6rF5xXc2+pN1LHwYgphAwbNoyqqirKy8vJzMwErB6P3NxckpOTo+cVFxezceNG7r77bu6+++7o8RtuuIHzzz+fX//61wf9mm63u8M/jDhPeI8dU8M9+sOORWe8l92d02p2Wr3gvJqdVi84r2Yn1RtTCBk4cCDjxo3j/vvvZ8aMGezatYvHH3+cadOmtTmvoKCAFStWtDk2YsQI/vjHPzJx4sTDb/Vhcrt2XztGS3RFRETsEfMS3VmzZhEMBpkyZQqXXHIJkydPprCwEID8/HxeeeWVDm9kRzMMg3Br6VodIyIiYo+YekIAMjMzmTVr1l4fKyoq2ufPrVq1KtaX6lS7Nyvr2bOQRUREeipHbtsOROeEmGH1hIiIiNjBsSEkbGhOiIiIiJ0cG0KiwzFhDceIiIjYwbEhJBwdjlFPiIiIiB0cG0JCGo4RERGxlWNDSFj7hIiIiNjKuSHE0HCMiIiInZwbQiKlK4SIiIjYwrEhJNS6T5uGY0REROzh2BASGY5RT4iIiIg9nBtCtE+IiIiIrZwbQtQTIiIiYisHh5DWa/cphIiIiNjCuSEkujpGwzEiIiJ2cGwIMTUcIyIiYivHhhANx4iIiNjLuSFEm5WJiIjYyrkhpPUCdpoTIiIiYg/HhhAzEkJMhRARERE7ODaERHpCDA3HiIiI2MKxIcREwzEiIiJ2cmwIieyYapjqCREREbGDY0OIqYmpIiIitnJ8CFFPiIiIiD0cHEJah2M0MVVERMQWzg0hrkhPiIZjRERE7ODcEBJdoqsQIiIiYgeFEPWEiIiI2EIhRCFERETEFgohWh0jIiJiC8eGEKKrY9QTIiIiYgfHhpBIT4hLwzEiIiK2cG4IcWk4RkRExE7ODSGGB1BPiIiIiF0cG0Kic0LMsM0NERERcSbHhhDtmCoiImIvx4YQosMxmhMiIiJiB8eGkMgF7DQnRERExB6ODSG0Dse4CYFp2twYERER53FuCDH2KF2TU0VERLqcg0OIZ/ffw5oXIiIi0tUcG0Iiq2MAhRAREREbODaEYOwZQjQ5VUREpKs5N4SoJ0RERMRWjg0hxp4TU9UTIiIi0uUcG0JcLoMWs7U3RD0hIiIiXc65IcSAcKR8hRAREZEu5+AQYhBUCBEREbGNg0MIhIgMx2hOiIiISFdzdAhRT4iIiIh9HB1Coj0huoidiIhIl4s5hFRUVFBYWEhBQQETJ05k5syZBIPtexLC4TCPPvooJ598Mvn5+Zx77rm8/vrrHdLojmDNCdHqGBEREbvEHEKmT5+O3+9n4cKFzJ07l8WLFzNnzpx25z333HMsWLCA//u//6OoqIhbb72V2267jU2bNnVEuw+b24CQhmNERERsE1MIKSkpYenSpdx+++34
fD7y8vIoLCzkueeea3fu5ZdfzquvvsqRRx5JIBCgsrISn89HQkJChzX+cLgMCJqamCoiImIXz4FP2W3NmjWkpaWRk5MTPTZkyBDKysqoqakhJSUletzlcuH3+/nPf/7Dddddh2ma3HXXXWRnZ8fUwFCo4wNCKBTCZRjRnpBQMACd8DrdReQ97Iz3srtyWs1OqxecV7PT6gXn1dxb6o2l/TGFkPr6enw+X5tjkfsNDQ1tQkjEhAkTKC4uZtmyZRQWFpKVlcXZZ5990K9ZXFwcSxMPmmuP4Zi1q1dSV+k7wE/0fJ31XnZnTqvZafWC82p2Wr3gvJqdVG9MIcTv99PY2NjmWOR+YmLiXn/G6/UCcMIJJ3D++efz6quvxhRCxowZg9vtPvCJMQiFQiwrWxZdHTN08EAYPLZDX6M7CYVCFBcXd8p72V05rWan1QvOq9lp9YLzau4t9UbqOBgxhZBhw4ZRVVVFeXk5mZmZAKxbt47c3FySk5PbnPvAAw8AcOedd0aPBQIB0tLSYnlJ3G53p3wYe+6Y6saEHvyBH6zOei+7M6fV7LR6wXk1O61ecF7NTqo3pompAwcOZNy4cdx///3U1dVRWlrK448/zrRp09qdW1BQwN/+9jeWLVtGOBzmvffe4/XXX+fiiy/usMYfDu2YKiIiYq+Yl+jOmjWLYDDIlClTuOSSS5g8eTKFhYUA5Ofn88orrwBw+umnc88993DPPfcwfvx4HnvsMR599FGOO+64jq3gEFk7pmqfEBEREbvENBwDkJmZyaxZs/b6WFFRUZv706ZN22svSXfg0j4hIiIitnLwtu3GHvuEKISIiIh0NceGkLY7pmpOiIiISFdzbAjRcIyIiIi9HBxCdAE7EREROzk4hOy5RFchREREpKs5OoRENivDDNvbGBEREQdydAhRT4iIiIh9HBxCdm/brhAiIiLS9RwcQiCkfUJERERs4+wQop4QERER2yiEgDYrExERsYGDQ4j2CREREbGTg0OIVseIiIjYydEhJKjhGBEREds4OIQY0Z4QM6SeEBERka7m4BCyuyckrOEYERGRLufoEBLZJ0Q9ISIiIl3P2SFE+4SIiIjYxrEhxK05ISIiIrZybAjZc06IqZ4QERGRLufoEBLtCVEIERER6XKODSGGYUTnhJgh7RMiIiLS1RwbQgDChnZMFRERsYvDQ4gH0HCMiIiIHRweQlp7QoLN9jZERETEgRwdQnYaGQB4akptbomIiIjzODqEbDKOAMBTUwKhFptbIyIi4iyODiHlRh8aTS9GOAi7SuxujoiIiKM4OoQYLjcbzL7WnYo19jZGRETEYRwdQtwug/WREFKuECIiItKVHB1CXC6DdeoJERERsYWjQ4jbgPXhSE/IWnsbIyIi4jDODiEug/VmP+uOekJERES6lKNDiMsw2GDmWnfqd0Jjla3tERERcRJHhxC3y6AOPwFftnWgQkMyIiIiXcXRIcRlGAA0pgyyDiiEiIiIdBlHhxC3ywohDcmDrQNapisiItJlnB1CWntC6pMjPSEKISIiIl3F0SHE1Vp9ffJA6y9apisiItJlHB1CIsMxtUmtwzGV6yActrFFIiIizuHoEBKZmFrn6wduLwSboLrU5laJiIg4g6NDSKQnJIwLMlp7QzQvREREpEs4OoREekJCYRP6DLUOal6IiIhIl3B0CIn2hJh7hBD1hIiIiHQJhRBae0Iyh1kHtVeIiIhIl3B2CGkzHNMaQrRrqoiISJdwdghprT5s7tETUrMFAvX2NUpERMQhHB1Cdk9MBfwZ4MuwHlBviIiISKdzdAiJzgkxTeuA5oWIiIh0GUeHEFdkdUy4NYRoXoiIiEiXcXQIaTMxFSAzsleIekJEREQ6W8whpKKigsLCQgoKCpg4cSIzZ84kGAzu9dwXXniBs846i/z8fM466yyee+65w25wR2qzTwjs0ROiECIiItLZYg4h06dPx+/3s3DhQubOncvixYuZM2dOu/Peeecd
Hn74YR588EE+++wzHnjgAX7/+9/z1ltvdUS7O0RrBtmjJyQSQtZBJJiIiIhIp4gphJSUlLB06VJuv/12fD4feXl5FBYW7rWHY/v27Vx33XWMHTsWwzDIz89n4sSJLFu2rMMaf7jaTUxNHwSGGwJ1ULvNxpaJiIj0fp5YTl6zZg1paWnk5OREjw0ZMoSysjJqampISUmJHr/88svb/GxFRQXLli3jrrvuiqmBoVAopvNjec7WjhCCwbB1zHDjSh+AUbme0M5VkJjd4a9th0i9nfFedldOq9lp9YLzanZaveC8mntLvbG0P6YQUl9fj8/na3Mscr+hoaFNCNnTzp07uf766xk9ejTnnHNOLC9JcXFxTOfHompXJQBbtm5l+fJaAIZ4skljPZuXv095VXKnvbYdOvO97K6cVrPT6gXn1ey0esF5NTup3phCiN/vp7Gxsc2xyP3ExMS9/szy5cv56U9/SkFBAf/93/+NxxPTSzJmzBjcbndMP3MgoVCI4uJisjIzYX0p2dk5jB1rzQcxdh4HOz4mz99E/7FjO/R17RKptzPey+7KaTU7rV5wXs1OqxecV3NvqTdSx8GIKREMGzaMqqoqysvLyczMBGDdunXk5uaSnNy+12Du3Ln89re/5eabb+aaa66J5aWi3G53p30YntZ928OtrwNA5nAAXBXroAf/EuxNZ76X3ZXTanZaveC8mp1WLzivZifVG9PE1IEDBzJu3Djuv/9+6urqKC0t5fHHH2fatGntzn3rrbf49a9/zaOPPnrIAaSzRa4dEwrvcTBTy3RFRES6QsxLdGfNmkUwGGTKlClccsklTJ48mcLCQgDy8/N55ZVXAJg9ezahUIibb76Z/Pz86O3ee+/t2AoOQ+TaMeE9l+P2ad2wrGoTBJttaJWIiIgzxDZBA8jMzGTWrFl7fayoqCj691dfffXQW9VFokt0w3uEkKQc8CZDoBYq10P2UTa1TkREpHdz9Lbtrm9u2w5gGNq+XUREpAs4OoS027Y9Qtu3i4iIdDpnh5C99YTA7smp5bqaroiISGdxdAhx7asnJBpCVnVxi0RERJzD0SFk9xLdb4SQnNHWn9u/gnDP3j5XRESku3J0CNk9MfUbD2QMhjg/BButK+qKiIhIh3N0CNnnxFSXG3JGWX/ftqKLWyUiIuIMzg4h+5qYCpA7xvpzm3MuJCQiItKVHB1CIhNTQ9/sCYHdIWT7F13YIhEREedwdAiJ9ISE99oTcoz1p3pCREREOoWzQ8jetm2PyD4KMKBuO9Ru79qGiYiIOIBCCHuZmArgTdx9Mbvt6g0RERHpaI4OIT6vG4BPSnaxbGNl+xM0OVVERKTTODqETBmZxah+KVQ1tPC9pz7m78s2tT1BIURERKTTODqE+L0eXrzhBM4ek0tLyOSOl4r5y6INu0/Q5FQREZFO4+gQAlYQmf1fx3H9twYD8LelpbsfjPSElK+BQL0NrRMREem9HB9CwNov5KpJAwFYu7OOppbW68Uk50BiNmDCjq9ta5+IiEhvpBDSKjclgYxEL6GwyerttXs8oHkhIiIinUEhpJVhGIzqlwLAl2U1ux9QCBEREekUCiF7OLpvJIRU7z4YCSFbl3d9g0RERHoxhZA9HN3aE/LVnj0h/QusP7eugJZGG1olIiLSOymE7GFUv1QAvt5au3sr97QBkNwXwi2w5TMbWyciItK7KITsYVBmIr44N40tITaUty7JNQw48njr75sW29c4ERGRXkYhZA9ul8HIvskAfLV1jyGZvEgI+diGVomIiPROCiHfsHuFzB6TUyM9IaVLIRyyoVUiIiK9j0LINxzd15oX0mZyas5o8CZBc7U2LRMREekgCiHfMGqPFTKm2To51e2B/uOtv5dqSEZERKQjKIR8w4jcZNwug4r6ANtrmnc/cOQJ1p+aFyIiItIhFEK+ISHOzZCsRGAf80IUQkRERDqEQsheRPYLabdpmeGG6lKo3mxTy0RERHoPhZC9iMwL
Kd6yR0+INxH6HmP9Xb0hIiIih00hZC/GDUgHYOGacmqaWnY/oHkhIiIiHUYhZC/G5qUxNDuJxpYQrywv2/1AZF5IySJ7GiYiItKLKITshWEYXDY+D4C/Lyvd/cDAyYABO76Cmq32NE5ERKSXUAjZhwuP64/X7aJ4SzVfROaG+DPgiOOsv697177GiYiI9AIKIfuQkejlzFE5wDd6Q4aebv259h0bWiUiItJ7KITsx2XjjwRgwfItNAZarxkTCSHr3td1ZERERA6DQsh+nDikD3kZPmqbgrxe3DoHpN9xkJAKTVWw5TNb2yciItKTKYTsh8tlcGmBNUF1zkcbCYdN6zoyg0+1TtCQjIiIyCFTCDmAyyYcSaLXTfGWal6L9IYMnWL9qcmpIiIih0wh5AAyk+K54eQhADz4xkqaWkIwpDWEbPkUGiptbJ2IiEjPpRByEK6dPJjclAS2VDXyzEcbIfUIyD4azDCsf9/u5omIiPRICiEHwed187OzRgAw+/21VNYHYMhp1oNr37OxZSIiIj2XQshBujD/CI7um0JtU5Dfv7N691LdVa9DMGBv40RERHoghZCD5HIZ3HPOUQD838clfOoaDcl9obHSCiIiIiISE4WQGJw4JJOLjuuPacId878iOOa/rAc++6u9DRMREemBFEJi9MtzjiIzKZ61O+r4S+Mk6+C696CqdP8/KCIiIm0ohMQoze/ltxeMBuCBJQHq+p4ImLD8eXsbJiIi0sMohByCb4/OZeqYvoTCJrOqjrcOFj0L4bC9DRMREelBFEIO0X3njyInJZ5ndh1DvZEE1Ztgwwd2N0tERKTHUAg5RJlJ8fzpyvG4vT7mtpwAgPnpMza3SkREpOeIOYRUVFRQWFhIQUEBEydOZObMmQSDwf3+zFtvvcWUKVMOuZHd1egjUnn0v/L5e9jauMz86hWoWGdzq0RERHqGmEPI9OnT8fv9LFy4kLlz57J48WLmzJmz13NbWlp46qmnuPXWWzFN83Db2i1NOSqHi6d+h3dD+bgIs/nl++xukoiISI8QUwgpKSlh6dKl3H777fh8PvLy8igsLOS5557b6/nXXHMNS5Ys4brrruuQxnZXV08axLqjfwJA35JX+Kr4E5tbJCIi0v15Yjl5zZo1pKWlkZOTEz02ZMgQysrKqKmpISUlpc35Dz30ELm5ucybN++QGxgKhQ75Zw/0nB353FdNu4Cih58iv/FjSub9isS+f6N/ur/Dnv9wdEa93Z3TanZaveC8mp1WLziv5t5SbyztjymE1NfX4/P52hyL3G9oaGgXQnJzc2N5+r0qLi4+7Ofoqud25V8JH33MWeFFXP7E37n61GPo43N36Gscjs58L7srp9XstHrBeTU7rV5wXs1OqjemEOL3+2lsbGxzLHI/MTGx41q1hzFjxuB2d+wXeSgUori4uBOeeyyN217Gt/5Nrgj8nd9+NJhnr5nAEem+A/9oJ+q8ersvp9XstHrBeTU7rV5wXs29pd5IHQcjphAybNgwqqqqKC8vJzMzE4B169aRm5tLcnJy7C09CG63u9M+jM54bt+Z92D+8S3Odi/l39X/5LKnTJ6/7ngGZnZOSItFZ76X3ZXTanZaveC8mp1WLzivZifVG9PE1IEDBzJu3Djuv/9+6urqKC0t5fHHH2fatGmd1b6eJ3cMxmn3ADAjbg6ZNV9y/mOLeHn5ll67QkhERORQxLxEd9asWQSDQaZMmcIll1zC5MmTKSwsBCA/P59XXnmlwxvZ45x0K4yYipcgf/LNwt1YwU//tpwfP/sZ5XXNdrdORESkW4hpOAYgMzOTWbNm7fWxoqKivR6/8MILufDCC2N9qZ7L5YLvPgFPnUZWxVpeyXmKM3ZO580vt7F0YyW/vWA0Z4/pa3crRUREbKVt2ztLQipc+ix4k+hf/SmLj36Jo3ISqawPUPjcZ9z0QhG76gN2t1JERMQ2CiGdKfsouOSv4PKQtnYBrx31Nj85dShul8Grn5cx
5eEPefbjEoIhXX1XREScRyGksw2dAuc/BoD749n8LPlt5v34RIbnJFFZH+CeBV8wddZ/ePfr7YTDmrgqIiLOoRDSFY69DE7/tfX3f93NsRue5p83ncR9540izR/Hqu21/PCZTzj5d+/z2Ptr2VHbZGtzRUREukLME1PlEE2aDoEG+Pf/wHu/Ia5xF1ee+VvOH9uPx95fy9+XlVJa2chDb63id/9axYSBGZw9pi/fGZ1LdkqC3a0XERHpcAohXcUw4LS7wZcOb90Fi2dDfTlp5/6eu6ceza1njOC1FWW8sHQTn22qYsmGSpZsqOTXr35JwYB0zh7Tl5OGZtIvzUdivD42ERHp+fRt1tVOKARfGrz8E1jxN9j+JVzyDL4+Q7i4II+LC/LYvKuBN7/YxuvFW/lsUxXLNu5i2cZd0adITvAwsE8ix/RP5dj+aRzdL4WBmYkkdfNwUtUQIM3vtbsZIiLSTXTvb63eauz3ILkvvHQtbC+G/z3Fmrx69HkA9E/3c+3kwVw7eTBbqxt5o3gbb36xja+31lDbHKS2KUjxlmqKt1Tz3JJN0afNTPLSL81Hmt9Lmi+OhDgXgWCYppYQtTXVjN6+iiP7JDKwTyIjc5PpkxTfZSU//K9VPPr+Wm48ZSg/O2tE9Phnm3YxZ9FGfnjSII7NS+uy9oiIiP0UQuwy5FS4YSG8eDWUfgz/uAKOL4TT7wPP7t6Cvqk+rjlpENecNAiAuuYg26obWb29js83V/F5aRVrttdRUR+gvM667cui0g1t7mcnx3NU35TWWzJH9U1hUGYice6Ona9cvLma2e+vxTRh9vtryU6J5wcnDGTJ+gqunrOMhkCID1fvZO4NJzAsp3OuQSQiIt2PQoidUvrBVa/Bu/fBR4/Cx4/D5k/g4r9Aav+9/khSvIeh2ckMzU5us+tqbVMLJRUNbK9pYldDC1UNAZqDYeI9Lrxug/UlpYT96WzZ1cS6nXVsrGhgR20zO2p38uHqndHniXMbDMlKYnhOMiNykxmek0y6P46vttawYnM1VQ0tnDe2H98ZnUuc20VtUwsvfbqZ/6ytAEzcLoM0n5cfnTyYIVlJBENh7pq/grAJR6T52FLVyK9e+ZLy2maeWriBxpYQ8R4X1Y0tXPnnpcwrnERuqibiiog4gUKI3dxxcOZv4cgTYP6PYfNSeOJE61j+FdaE1oOQnBDH6CNSGX1EarvHQqEQy+MrGTv26OiVGeubg6zcVsvXW2uit1XbaqkPhFi5rZaV22rh872/1jtfb6dfagLHD+7DW19uoz4QanfOayvK+O+LjmFHTRNfbKkhJcHD/BtP5JG31/DC0k3Mem8tAN8ansWDF43h8qeXsH5nPVf9ZSk/OGEgjS0hTNPkrFG55GX4D/LNFBGRnkQhpLsYORWu/xDmXg1lRfDKTbDiH3DuH6DPkA5/ucR4D+MGpDNuQHr0mGmabKlqZPX2WlZtq2v9s5bK+gAj+yZzTGvAeX7pJsqqm5hXtAWAodlJXFLQn5SEOIJhk9dWlPHx+kpufqEIj8sKUXedfRTZyQn85vxR7Kxt4p2vd3DqiCye+P44EuLcPHP1BC584iNWbqvlF/OLo2166K1V3HDyEG44eQg+rxWgmoMh4j3OuMy1iEhvphDSnWQMgh++A0uegPdmwsaF8MQk+Pb9MO7qg+4VOVSGYdA/3U//dD+njczZ53mFpw7llc/L+HprDacflcOJQ/pg7NG2y8bn8Yd31zD7/bUEwybjB6ZzaUEeAB63iyevKOCLLdWM6peCp3X+SV6Gn2d/OJHZ76+lqSWEL87NlqpGPi3ZxR/eXcPfl5WSnuhl864GapuCDMtO4jujczn96BzqmoJ8va2WDeV1HJHmZ9yAdEbmJPLlzgD/fP1rPl6/iyMz/Jx7bD9OG5lNXXOQj9aV82nJLvLS/Zw/tl90LxbTNNlZ10xKQhwJcQo6IiKdSSGku3F74MSb4Khz4ZWbYcOH8NotsOYdOO9RSOxj
dwtJiHNzSWuo2BuP28VtZ45gwqAMXi/exk9OG4orUAur3oCjz8cdl7B7JUxDJRQ9C2MvZ0RuHx79r/zo85imyevF25j5z68oq25iW83unWTX7KhjzXtro8M6+1YJwFdba3jzy214PdaKoT399xtfc9KwLDwugxWbqymvaybR6+bMUbmcc0xfGltCfLBqJ4vXVZCZHM/lE47k3GP7EQiGWbB8CwuWb8EAJg/L4pQRWYw+IrXDJ/eKiPRGCiHdVfpAuGKBNVn1nV/Dqn/CYx/DxBtg/LXgzzj45zJNMLv+InmTh2UxeViWdeeFH1o1bPg3XPDY7nbNvx7W/AvWvg0/eKVNb49hGEw9pi+njszig1U78Xnd9E/zkeqPY9Hact4o3sZH6yrok+RlZG4yg7OS2Fhezyclu9hZ20yS1+CMUX05ZUQ2X2+t5bUVZWze1QjAqH4pjB+YwYrNVXy2qYp/7zE5F6A+EGJ+0Rbmtw45RWypauTz0ip+88+vaA6G2wSazzZV8Yd31wCQ6HWT6osjJzWBIVlJDM5KJDneQ3VjC1UNLcTHuRiWncywnCQ8Lhcrt9WwenstzS1hhmYnMSwnmbwMHwlxbuI9LlyGQVNLiKaWMD6vu9vvCSMicjD0f7LuzOWCE38CgybDS9dB+Sp4fyb85/dw7KUwZAoMnGTtwrqnlibY8ZW10mbTR7hKFnNc3Q74IAuSsiFtAPQvgP4ToF8+eDtw4mc4DE1VbUPS+g+sAAKw/Flrn5SBk2Dla1YAASucLH8O8r/f7in9Xk+blUAA383vz3fz976CyDRNdtQ0UrL6K8Yddwxut5vzx8Id3x7Bup11pPu9bfZI2VBez5tfbCMhzsUx/dM4qm8yX2+t5dXPy3j7q+0kxrs5eXgWJw3LYuXWGp5dUkJppRVmRuYmc+n4PPxeNx+u3snCNeXUNgWpD4SoD4Qoq26iaFPVIb6Ze+cyoGBgBmcclcPEwRn4vW5cQHlDiPK6ZhK8cXjdLrweF25X5w7hiYgcDsM0zW556dZQKMTy5csZO3ZsdEVHT3juThMKwlcLrACyfffETQyX1WviTQJvIjRVw85VYLZfsbJXbi/kTYTBp8Cgk6HvMeDZxyZm9RVQsxlyxlgBKcI0Yctn8OU8+HKBdc6UX8HkWyEcgj9Ohh1fgi8DGishcwT88C144iTr3KyRsHMlJKTCjcusoPTpX2DJ/8JxP4DjfxzzfJjO/IzDYZNPN+3CF+dmVL+UNvNhQmGTmsYWqltvW6oaWbejjrU762hqCZHm85Lqj6OuOcia7bWs3l5HOGwyPNdaEh3vcbF2hzUpeHtN82G31WVYk5CPzPAzoI813yfNH0eqL44Mv5ec1AT6pfrISo7vcYGlR/53fBicVi84r+beUm8sdagnpKdwe2DMNBh9Eax7D1a9bvUelK+GyvXtz/f3gb5j4cgTCOVN5MuyBkYNzMHdsNPqUSldat3qtlkTYDcuBH4DrjjIHQ1HjNt9M8Pw8ROw4u8QbLJCyJRfWj0xXy2ARb+HbcVtX//d+yAchMQsK4AkpMF178KfzrRe/6kpVgBJOxKufQfmnANbl1urgswQrH3Hep637rKe+5xHIO4Q9w+p3mIFnPiktscbKiHO3/Z5y9fA6jetQDfgJMgc1i4AuVwG4wfu0dMTyfGGgdtlkJ7oJT3RC811HJvuhqGZ4O5nvVYMYco0TQKhMM3BMGYY4uNcxHtcbN7VyLtfb+edr3ewenstLSFrWKg5GOIb010Im1DbFOTLshq+LKvZ52t5XAY5KQn0TU3gyAw/Z47K4dSR2VqFJCKdSiGkpzEMGDrFugHUbIVdG6wr9LbUgycBckZbG6FFvvBCIVp2Lbd6OdxuGH6mddw0rQCz7j1ryKTkI6unoqzIui17uv3ruzxWT8zzl0B8CjS3frF5fDDybBh1Iez8Gt77rTV05G7tVTn1F5AxGM66H+ZdB5XrrOPfeQjik61Jt/97Cqx5yzrujrdC
1+d/g8+ft8LW8G9br9fSCOkDIHeMVWuwGeq2Q0OFFXZSj4C4JDJK38L12e2w5VMrXPUfDwNPgtqtULLIqt3lgeyjrefZtgK2f9G23sQs6zmbqq1bXAIk94PkXAi1WEGqpszqkUrKsY4Hm6Fqk/Ve7ikhDQZMstqQlA1VJbBro9XLlZxrfWZx/uhrGS4P8RmDiM8YbPVOVayF8jXkudxcNeZirpo0MfrUkX95HHvssYRxRYNJqGYbdTW7WBvKpaSygbKqxui8lMr6ZrZVN7G9tplg2FqevaWqkU9KdjGvaAspCR5OPyqH+DgXzcEwmDAkO4mRuckM6JPIztpmSiutDfKSEjxkJHrJTk5g3IB0vB5NzBWRA1MI6elS+lq3Q2EY1h4kfYbAhOusUFJVYn1pb/nMum1dbn3pj5wKJ9xoDZ0s+j0sedIKBP4+7SfLHnWO9aX/zq8g1AyZw6HgGuuxMRdbcz/WfwAjpsKIb1vH+x4D3/oZfPig1YPz3Sche6R1/otXwZZPrNtBcgODdhcK4RbY9JF121M4aIWPbSus+y6PNSwVCsDmZVC/07pFhJpbh7y+bv+iVSXWbV+aqqy5MZH5MYfj3Rkw7Cxr+//yNbi2f8nRu7biXjkcT8YgvEDihn/Djq/IBAYOPd3aAC/7aOtz3rkKqiqh77GEErPZWdtMWXUjW3c1sLpkM38vrmVbbXN0L5hYHJHm44ZThnDxuP5a5iwi+6UQIrsZhjW/JH2gNewD1r/Sg01thzLOmAETf2wNq/SfsPeJrSdNt4Y0Pn0Gpv4/a2fYyGtc9Gf4/AXIv7ztz5xyF4yeZvWYuFt/NYecCte9Z60SCrVYvSaeeGvYZNsKqyfB5YHEbGv5cmOV1TNhhmj25RB3/I9wjfsBBOqs4LNpidULMfAkay5Mc63V67P9C0jNs8JWJEwFm2Hr51YgSUiDhBQrkNVssXqgPPGQcoTVg4EJtdutXhZ3nDX5Ny3P6i0KB63n2rnKGvYqWQTNdVZvTvpAa15O7Var3cEma+goIRWCAau3pnKdNdk4cyj0GWa9/saFsPoN6wYYgA+gtu31gcCwemnWvmP1eA0+1bpyc9226BnuPkPJzRpJblUJlK9larCR6Z4EmrL6Um6kY7o8GIYLTJOW5nporiMUCvJV3GjWpE+mJnMsOdXFDK36D0c2fEVDg5uaf/r54K0k3B4vgbBB0DTok+TjiD7J9O+TQlxaP4Ip/Qkk9ceXNQgjKdv63ajZCuvehU2Lrd615FzrPR70LauHS0R6FU1M7cGTfw5Wr663pdEautlzomwoSKhuJ8vXlDI2f1zvqxmsEPbpHKhYB5nDCGcdxbpt1Qzp48VVtdEKPQNOgIHfsnpg3vkVfP3q7p/3JFjXJ6pYB9j/v4CA4aXZm0Fy87a9n2C4YPh3YPw11sRmM0QoGOTzjeUce9z43vkZf0Ov/u94H5xWc2+pVxNTxTnifO2PuT1Wb4dR1vXt6SqZw+CsmdG7ZihETXg55tix1ryfPSX2gUufhZLF1rWJ+uVbPVhxCdC4y+odqlxv9cpkjYDkvtYcm5otULtt9z4zhmG9395EK/ytfQdWvQm1ZVZvxbAzrVVWLjfB+ipKt23DZYaIM0zC4SAbdlSzbnsNjY0N5Bi76G/spL+xk1x24SWAt3kbYdNghTmIReHRmBgMSahhjHcH/Ru+bDeU5QaOdSfgWnmi9brZoyA5x2q/v0/7ScDNdeBy7/13RkRsoRAi4hQDTrBue/Kl756Xs6eMQdZtf0ZOhakPW3NmErPafOl72HNOjqU/cJJpUlbdhMsAf5wHlws+3VzBmrUr2bZ5IyVGf6pdKTQ0h1heWkWgzlruM9TYzOXudznP/RHJRiOm4cFtmHhCTdYw07r32rxW2J9JS+5xNGbnkxjcRdzmxbDtC8C0JhanD7SG0wL11i0x0wpn/cZa90uXWPvs+PtAwdVw1HnWMFvdDlj3vtW7tOdE5Iq1Vq+SYbSGvILW
MLcD6ndYS9Uj57c0WM+9eZk1/Db825B3vNWbZ5pW+GuqseZfufYzwTccgurNVoAM1FmTniNDiU01UPyiNcw4ZAoMP8tqf0ujtfpr8yeQM8r6mbQjrSHD6s1WXTljdg+H9lSmaQ2lBputuWUJ7S/suV8tjdbwb0JK57RPonr4b5qI2MowrF6ngz7d4Ii0tj0R44fmMn5obrtz65uD/GdtOf9evZM12zN4rHww99VduccZJiOMUia5vmSi62v6GzvJNnaRZdTgaignfv2/iF//r/aNqC2zbt+04cO9N3rjQitQJGVbX2wdbdEfICkXco62lqNHJkInZsHQMyBrOGz/ElfZcvJ3lWC84bZ6dILN1oTrCMNlzXNKOxK+fs1aLQfwyZ+t5zryBGteVGRFW0RCmnUssqtyah5M+BEcd4U1wbxmizVfqal69+q0lH6QMcQKVaVLrF6xTR9bvw/eZGsOmTfJ+jM+xZojlTXcCn87vra2Fyhdap3vS7fa0FhpBaGaMuv9zpuAcUQBfUo3Y1S9Yc2bSs6FASdadVaXwsp/wtp3rdDUfzz0O856/s9f2L0CD6zgmT0Sso7aHUoiK97CIWtVmtdvzTHbsNCanB9ugfRB0PfYtje31/o9KCuyAmW/46zQmZBmtbFyHewqseqIvHeRv4cC1qq+fvmQewz0GWotDNhbz11EzVZrjlTtNutyHmn7vmQGLdYmirg81q2TrzfWETQnpAePux0sp9ULzqvZKfVWN7Tw1dYaviyr5suyajZtq8CTkEhjS4jK+gDldc2YLU2MMjZynHsNY10bqAgnsiR8FMvCIwni4khjB8elVJOb5CHg8tPiiqcv5QwLrSWvaTVurw8zbwKpQ0/A2F4Mn84hrnH3CqlA1hhIOxKjfgdG3XYMtwdX5jCMzGGtq6o+scJEuMX6Ek/KtkJD7XZrdRVYX+B5E6x/sa96A5qrdxfp8lhfdC0NB35D3F7ri91wt1+xlTnc6ulY+U+rNyYipT8MPc36si4rsnpBwJpb5fZCoNa6b7hsudzDwTE44FymOH9rMOiiYVl3/O7PN1YJqZAxhHDGYHY2GGQluXHV77B61/ZccWe4YdQF1s7STTW7e+Aq1lq3b24LYLh2/z4l97UCTHI/q+essdJaeHDGDMgbf8hl743mhIhIr5Tqj+OEIX04YUifff6PriEQxMAgIc6FYRjsqGmi/5Zqhm6uZvG6coo2pfJ5tQnVez7zkcBxu++WgbEUTDMfL6M5zVVEPAEWhcdQXpoKpW3b5d5ukJHopW9qAgP7fI+hAzz080M4PhW320V8nIskr5s0Vz1J8R78qVkkJ3hI9HpwhQOw/kNrz5ncY6xhEsMNpR9blzWo3gI5RxPKGcNXO4IcffRo3IZpDa8k97UCDlh706x+y/rX/IizrR4Dw4CzH7Kep2y5tcpowKTdwzyBeuvLKynHWmEWCljDOB8/YW0yCFZPRko/q8ciPsUaxqrebP2Lv6na6uUYdoY1L8fjs0JMc531RddcZw3xVK63Voft2mitChv0LWszwDifNS+pcRf40qzJ0sn9rPNLl2Bu+ZSa+iaS847GlXKEtSfSxkVQvcn60h98Moz4jvVlu3kZbCmCpCw45jKr1yA+yVoxt3OVFdJ2rLT+bGmyXi8h1XqvW+qtvZZ86dbKuUGTrQCzbYXV6xG5VbReMDM1z+rNiPNbvSYVa6wA4vJY70fGoNaVc62r51L6WX83jN29KNu/gIr11ufeVA1ln+Eq+4x21y83XNY+Rt5Eq0fki5es28Eww9ZnGgpYbaxY0/6ckkUdHkJioZ6QXvyvxgin1QvOq9lp9cKh11zb1MKyjZVU1rcQNk3CYZO65iAV9QEq6popqWhg9fZadjVYQx0ZiV6GZiXhcRtsrW5iS1Vj9MKFHpdBMHzo/ws1DEjyekhK8JDm95KX7uPIDD/pid7o9v8NgRBul4GBSX3NLo4dmkdeRiJ9UxNIag0yifEe/F7rYodG
R3TBm6Y13BFZmr6vcwL11pfjwb6macY0RLDPz7h22+7hnq7UVGP1HH3zAqKNu6ywk9p/93YEB6ulESo3QOU6wuVr2bHhS7IHjcIV2cCwX/7uz2DrClj8mDVEGBkSiwzp9BlqDcW53FYbw6HWW+s2CzVbrKAaee/8GVaI3TOUdhD1hIiI7ENyQhynjWz37802TNOkoj6Ay7B6OL75WChsWsHAMAgEw9GhoC1VjWwor2fDznoq6gOETZNg2KSpJURdU5C65iC1TS3UNgUJhk1ME2qbg9Q2B9la3cTXW/e9tX7EW+tW7/Mxj8vA73WTGG8Fk0SvG39rSPG4DHbUNrG9ppmaphZG5CRzTP80Rh+RQqovjoQ4NwlxLuI9bhLi3Pi8WSSZHpJaazVNk8aWEM0tYVJ9cbhcRuwhoKPmKCS3n0PUJfYVyHzp7S8kerDifNZ8oJyjMUMhtviXk7W3VW5gbep44ZOH9jp9hhzaz3UyhRARkW8wDIPMPa60/M3HPO7dX6Zej4vc1ARyUxMYfcTBrcIwTZPmYJjaJiuU1DUHKa9rprSykdLKBqobW0jxxZHmi8PndWOa0BIKsX7TFoLxqZRVNbG9ton65iD1zSEaW6wLVgbDJjVNQWqaggdswyclu/ikZNdBtTfe4yIQCkcvkxTvcXFkhp8j0n00BEJU1DWzq6GFUGuvkMuAnJQEBmUmMjAzkUGtt/7pPsqqGvliSw0rt1nXPfK4rGsu5aYkMCjLOi8chor6Zirqmind1EhF/A6SfHFgQlPQCkLpiV4GZyaSlRzfMb0/h8E0rUDp6mEXgewOFEJERLqYYRitPQ9uspL3cdXqb7C6uGsYO/aYdl3cobBJQ8AKJPWBIA3NIeqagzQEgq1/hggEw2Qnx5OTmoDf6+brrTV8XlrNqm21NASCNLWEaQqGaAyEaGoJ0dQSJhCyhp2av3FlxOZgmDU76lizo26f7d3V0MLKbbUxvjP7sOSzfT6UFO8hPTEOj8uFx2VEe59agmFawibBUJiWkEmqL47BWYkMzU6yrhrdesHJyM1lWD/bGAjR0PoeNASsgOcyIM3vJc0fRzhssqmygU2VDeyoaY5eMdvjNji6bwrH9E+jX1oCFfUBymut3rAhWYkMz0mmT5KX0spGSioaqGlq4Yg0HwP6+OmX5sMX58bjgtrmMI2BEP54F4YBTS1h6gNBmlpCGIaBywC3yyDVF9fmApPf7KHbm0AwzKbKBlJar/Xkcdt/jSeFEBGRHs7tMkhOiCM54eDnI4zMTeG7+f33e05zMGQFm+Yg8XEuEr0e4twutlZbX6RlVY0kJXjokxjf+qVmffkFQyZbqhrYUN7AxvJ6NlbUs35nPWXVjWQnxzOqXypH900hKcFDKGz1CpVVNbJ+Zx0lFQ143AYZifGk++Oora3F5bV6XFyGQXycizi3i521zWze1UBdsxW0DqSuOciWqkYWrik/6PcoFsGwyWebqvhsU9XhP9krbwPW6NX+Zm0met0kJXiiwSkYNnEZ4Itz44/3kJMST99UH6m+OFZvr2Xl1tposDQMyE1J4HcXH8ukoZmH3+ZDpBAiIiJ7Fe9xE+9xt5sXM6BPIgP6JO73Z0fkJrc7FvmX+sE60ATH5mCI0sqG6BybllAYl2EQ5zaIc7vwuFzEuQ08bhcVdc2s21nH2h11VDW0EGqdlBwyIRQOEwqbuAwDn9eN3+vGF9d683oImyZVDYHoZOUjM/wcmeEnNzWBNH8cqb446ptDfLGlms83V1FRF6BPkpes5HhME9buqGP19lqqGlron271fqQkxLGlqpFNrVeibmoJ0xwM0RLanTr2DCDRK1Ob0BK2hsbqAyHqA6E270l4j+M7a5v5YkvbeUZ+r5umlhBhE7ZWN7Fme61CiIiI9H6xBJCDEe9xMzS7fdjZm0GZiRQMzDjwiYdhaHYSF+Qf+oUWQ6EQn35WxMhRYwiaBsFQGH+8B1+cu817Fw6b1DYF2dUQoLYpiM/r
Iik+joQ4F4FgmMaWELVNQbbXNFFW3cSu+gCDsxI5tn8a/dN9hE3Y1RCgqSXUbvPArqYQIiIi0k24XQaJ8Z79Lm11uQxS/XGk+vc//LavidJug31OvO5q9s9KEREREUdSCBERERFbKISIiIiILRRCRERExBYKISIiImILhRARERGxhUKIiIiI2EIhRERERGyhECIiIiK2UAgRERERWyiEiIiIiC0UQkRERMQWCiEiIiJii257FV3TNAHr0sYdLfKcnfHc3ZHT6gXn1ey0esF5NTutXnBezb2l3kj7I9/j+2OYB3OWDQKBAMXFxXY3Q0RERA7BmDFj8Hq9+z2n24aQcDhMMBjE5XJhGIbdzREREZGDYJom4XAYj8eDy7X/WR/dNoSIiIhI76aJqSIiImILhRARERGxhUKIiIiI2EIhRERERGyhECIiIiK2UAgRERERWyiEiIiIiC0cFUIqKiooLCykoKCAiRMnMnPmTILBoN3N6lArV67k6quvZsKECUyaNImf//znVFZWAvD5559z8cUXk5+fz2mnncaLL75oc2s7TigU4oorruDOO++MHuut9VZVVfHzn/+ciRMnMn78eAoLC9mxYwfQO2v+8ssvufzyyykoKOCkk07it7/9LYFAAOh99VZWVnLGGWewZMmS6LED1Th//nzOOOMMxo4dy4UXXkhRUVFXN/uw7K3mt956i/PPP5/jjjuO0047jdmzZxMOh6OP9+Sa91ZvxI4dOzjxxBOZN29em+M9ud4DMh3k+9//vnnbbbeZDQ0N5qZNm8ypU6eaTz31lN3N6jCNjY3mpEmTzD/84Q9mc3OzWVlZaV533XXm9ddfb1ZVVZkTJkwwn332WbOlpcX86KOPzPz8fPPzzz+3u9kd4ve//705cuRI84477jBN0+zV9X7/+983b7zxRrO6utqsra01f/KTn5g/+tGPemXNoVDInDRpkvnMM8+YoVDI3Lp1q3nWWWeZs2fP7nX1fvLJJ+bpp59uDh8+3Pz4449N0zzw7/HHH39s5ufnm5988okZCATMv/zlL+bEiRPNhoYGO0s5aHurubi42DzmmGPM9957zwyFQubatWvNU0891fzTn/5kmmbPrnlv9UaEQiHziiuuMEeOHGm+9NJL0eM9ud6D4ZiekJKSEpYuXcrtt9+Oz+cjLy+PwsJCnnvuObub1mHKysoYOXIkN954I16vl/T0dC699FKWLVvGv/71L9LS0rj88svxeDyccMIJnHvuub2i/sWLF/Ovf/2LM888M3qst9b7xRdf8Pnnn/PAAw+QkpJCUlISv/nNb/jZz37WK2uurq5m586dhMPh6MWwXC4XPp+vV9U7f/58fvazn3HLLbe0OX6gGl988UWmTp3KuHHjiIuL46qrriI9PZ3XX3/djjJisq+at2zZwmWXXcapp56Ky+ViyJAhnHHGGSxbtgzouTXvq96Ixx57jNzcXPr27dvmeE+t92A5JoSsWbOGtLQ0cnJyoseGDBlCWVkZNTU1Nras4wwePJinn34at9sdPfbWW28xatQo1qxZw/Dhw9ucP3ToUFauXNnVzexQFRUV3H333fy///f/8Pl80eO9td4VK1YwdOhQ/vGPf3DGGWdw0kkn8eCDD5KVldUra05PT+eqq67iwQcfZMyYMZx88skMHDiQq666qlfVe9JJJ/H2229z9tlntzl+oBrXrl3bY9+DfdV81llncdddd0XvNzU18cEHHzBq1Cig59a8r3oBPv74Y/75z3/yq1/9qt1jPbXeg+WYEFJfX9/mSwqI3m9oaLCjSZ3KNE0eeeQR3n//fe6+++691p+QkNCjaw+Hw9x+++1cffXVjBw5ss1jvbFesHoGVq1axcaNG5k/fz4LFixg+/bt3HHHHb2y5nA4TEJCAr/85S9Zvnw5r732GuvWrWPWrFm9qt6srCw8Hk+74weqsSe/B/uqeU91dXXceOONJCQkcNVVVwE9t+Z91VtRUcEvfvELfve735GY
mNju8Z5a78FyTAjx+/00Nja2ORa5v7cPvierq6vj5ptv5tVXX+XZZ59lxIgR+Hw+mpqa2pzX1NTUo2t/8skn8Xq9XHHFFe0e6431AtHLYt99990kJSWRmZnJ9OnT+fDDDzFNs9fV/Pbbb/PWW2/xve99D6/Xy7Bhw7jxxht54YUXeu1nvKcD1dib34P169dz2WWXEQwG+etf/0pSUhLQu2o2TZOf//znXHHFFYwePXqv5/SmevfGMSFk2LBhVFVVUV5eHj22bt06cnNzSU5OtrFlHWvTpk1cdNFF1NXVMXfuXEaMGAHA8OHDWbNmTZtz165dy7Bhw+xoZod4+eWXWbp0KQUFBRQUFPDaa6/x2muvUVBQ0CvrBasbNhwO09LSEj0WWTVw1FFH9bqat27dGl0JE+HxeIiLi+u1n/GeDlTjsGHDeuV78OGHH3LxxRczefJk/vSnP5Gamhp9rDfVvHXrVpYuXcpjjz0W/f9YWVkZ9913H9dffz3Qu+rdK3vnxXat//qv/zJvueUWs7a2Nro6ZtasWXY3q8NUVVWZp5xyinnnnXeaoVCozWOVlZVmQUGB+Ze//MUMBALm4sWLzfz8fHPx4sU2tbbj3XHHHdHVMb213kAgYJ5xxhnmTTfdZNbV1ZkVFRXmD37wA/PGG2/slTWvWbPGHD16tPnEE0+YwWDQ3LRpk3nOOeeYDzzwQK+s1zTNNisnDlRjZLXM4sWLoysnxo8fb+7atcvGCmK3Z81FRUXmqFGjzBdffHGv5/aGmve2Oibi1FNPbbM6pjfUuz+OCiE7d+40b7rpJnPChAnm8ccfbz7wwANmMBi0u1kd5s9//rM5fPhw89hjjzXHjh3b5maaprlixQrz0ksvNfPz880pU6a0+UXvDfYMIabZe+vdtm2bOX36dHPSpElmQUGB+fOf/9ysrq42TbN31rxo0SLz4osvNseNG2eecsop5sMPP2w2Nzebptk76/3mF9SBalywYIF51llnmWPHjjWnTZtmLl++vKubfNj2rPn66683R4wY0e7/YT/84Q+j5/f0mmMJIabZ8+vdH8M0W9e9iYiIiHQhx8wJERERke5FIURERERsoRAiIiIitlAIEREREVsohIiIiIgtFEJERETEFgohIiIiYguFEBEREbGFQoiIiIjYQiFEREREbKEQIiIiIrZQCBERERFb/H8HTQElBh2xvwAAAABJRU5ErkJggg=="
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "data": {
      "text/plain": "<Figure size 640x480 with 1 Axes>",
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAiEAAAGbCAYAAAASrkAJAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAABi+UlEQVR4nO3dd3hUZd7G8e+UtEkPafQaqpRAaCICAvYuYHd17XF1Rdfu6opl1fVdXVbZdbFg31XWAta1IipSlBJAIAQIJUAa6WUyM+f946QQCSWQZJKc+3NduUjOnDnz/Cbo3DznKTbDMAxEREREWpjd3w0QERERa1IIEREREb9QCBERERG/UAgRERERv1AIEREREb9QCBERERG/UAgRERERv1AIEREREb9w+rsBB+Pz+fB4PNjtdmw2m7+bIyIiIkfAMAx8Ph9OpxO7/dB9Ha02hHg8HtLS0vzdDBERETkKgwcPJjAw8JDntNoQUpOeBg8ejMPhaNJre71e0tLSmuXarZHV6gXr1Wy1esF6NVutXrBeze2l3po6DtcLAq04hNTcgnE4HM32y2jOa7dGVqsXrFez1eoF69VstXrBejW3l3qPZCiFBqaKiIiIXyiEiIiIiF8ohIiIiIhftNoxISIi0rYYhoHH48Hr9TbJ9WquU1FR0S7GSBxOW6nX4XDgdDqbZPkMhRARETlmbreb3bt3U1ZW1mTXNAwDp9NJZmamJdaLakv1ulwuOnbseNgpuIejECIiIsfE5/OxdetWHA4HnTp1IjAwsEk+RA3DoLy8nJCQkFb/odwU2kK9hmHgdrvJyclh69atJCUlHdFU3INRCBERkWPidrvx+Xx07doVl8vVZNetWXkzODi41X4oN6W2Um9ISAgBAQFkZmbidrsJDg4+6mtpYKqIiDSJY/kXsbQtTfW71t8YERER8QuFEBEREfELjQkRERHLeeCBB1i4cCFgbphaVVVFSEhI7eNz584lJSWlUde85pprSElJ4YYbbmh0ey6//HJWrlxZb+qrYRj07t2b22+/neOPP772vGXLlvHQQw9x0UUX1bvG+vXrOe+88xg1ahSvvfYaAOnp6TzzzDOsWLECt9tNXFwcp512GjfddFPtzJaTTjqJnJwcnM4DI8HRvA+NoRAiIiKWM2vWLGbNmgXAu+++y7PPPstXX311TNd84YUXjun51113HVdffTUulwubzUZxcTFPPvkkN910E4sWLSIiIgKA6Oho3nvvvQNCyPz58wkLC6v9uaSkhMsvv5zf/va3PPXUUwQHB7N582Zuu+02cnNzefTRR2vPfeihhzj//POPqf1Hw3K3Y8rcHuYu3kpWscffTRERabcMw6DM7WmCL+8Rn2sYRpO1f+fOnfTr14/HH3+ckSNH8tBDD+F2u3niiSc47bTTSE5OZuzYsTz88MO1r3v55Zfz97//HYC7776bBx54gBtuuIHk5GQmT57Mq6++2qg2hIeHc/nll1NWVkZmZmbt8dNOO41169axdevW2mNut5uPP/6YU089tfbYli1b2LdvH+eee27ttN+kpCTuu+++2kDjb5brCfl8/V4e/3QjJ3YL5vTx/m6NiEj7YxgG0/65hJ8y97Xo66Z0j+adG8Y26fTW0tJSvv/+eyoqKnjllVdYvHgxr7zyCvHx8axcuZLLLruMKVOmMHbs2AOe++677/L888/z7LPPMn/+fGbNmsUpp5xCQkLCEb12fn4+L774Ip07dyYpKan2eExMDCeeeCLvvfcet912GwCff/45gwcPJjExke3btwPQv39/evfuzcUXX8yZZ57JiBEjGDJkCGPGjGHMmDFN8O4cO8v1hFRUmcvillY1XWIWEZH6Wu8qF41z7rnnEhgYSEREBDNmzGDevHnExcWRnZ1NRUUFoaGh7N27t8Hnjh49mnHjxuF0Orngggvwer21AaEhc+fO5cQTT2T48OEcd9xxnH322QQFBfH6668fsBbH
+eefzwcffIDP5wPMWzEXXHBBvXMCAwN5++23ufDCC1m6dCmpqamMGTOGSy65hDVr1tQ796GHHiIlJaXe11lnnXU0b1mjWK4nxFE9t9nXhN12IiJSx2az8c4NYymvOrY9ZAzDoKysHJfryFYQDQlwNPkiX/Hx8bXfl5eXM2vWLJYvX05iYiIDBw6sXWCsIXFxcbXfBwQEABz0XIBrr72Wq6++mqCgIBYuXMjDDz9MSkoKnTp1OuDciRMn8sADD7BkyRJ69uzJxo0bOemkk0hPT693XlhYGNdddx3XXXcdbrebtWvXMnfuXK666iq++uorIiMjAXjwwQf9MibEciEkxFPIg85XWFs1CTjJ380REWmXbDYbrsBj+4gxDAM8DlyBTbNZ2tHY/3Xvv/9+IiMj+e677wgKCsLn8zFy5Mgmf02Hw8F5551HZWUl99xzDzExMZxwwgn1znE6nZx11lm899579OjRg7POOuuAfVyefvppfvjhB9555x3A7BkZPnw4f/nLXxgxYgTbt29n8ODBTd7+xrDc7ZiO2Yu4yvkZ51Z+4O+miIhIG1JSUkJQUBB2u52SkhKefPJJSkpKqKqqapbXu+iiizj55JO58847ycvLO+Dx888/ny+//JJ33333gFsxYA5g3bhxI8888wy7du3CMAxyc3N57rnn6N69O/369WuWdjeG5UKIE7MrLMio8HNLRESkLbn//vvZsGEDo0aN4tRTT6WkpITx48ezadOmZnvNhx56iMDAQO69994DHuvXrx89e/akQ4cO9O3b94DH+/fvz+uvv86mTZuYNm0aQ4cO5dxzz6WgoIDXXnutXs/Jgw8+SHJy8gFfc+fObbbaAGxGU85pakJer5dVq1YxbNgwHA5Hk113zcfPM2TZnfxkH8Kw+75p0mu3Vs31XrZmVqvZavWC9WpuzfVWVFSwdetWevbseUybmf2aOSakrHbdjPauLdV7qN95Y/6uWq4nxOYwBwc5OLYBUyIiInJsLBdCqAkhhhYrExER8SfLhRC7ekJERERaBcuFkJrbMU6FEBEREb+ybAjR7RgRERH/slwIsTtrekIUQkRERPzJciHE5jDnRet2jIiIiH9ZLoTYnWYIcagnRERExK8sF0LUEyIiItI6WC6EOKp3MnQaCiEiItL0tm3b5u8mtBmW20VXU3RFROSBBx5g4cKFAHg8HqqqqggJCal9fO7cuaSkpDT6uuvXr2fGjBmsXbu2wcf//ve/M2fOnHpLnft8PuLj47n00kuZPn167XnPPvssZ5xxBn/961/rXcPtdnPiiSeyb98+Nm7cCEBhYSFPP/00X3/9NYWFhYSFhTFu3DhmzpxJYmIiAHfffTcLFy48YLddgOuvv54bbrih0fUeK8uFEIez5naMxoSIiFjVrFmzmDVrFgDvvvsuzz77LF999dUxX7e4uPiwu+qmpKTw2muv1f5cVVXF+++/z/3330+3bt2YNGkSANHR0XzxxRcUFxcTHh5ee/5XX311wGvMnDmT8PBw5s+fT1xcHLm5uTz66KNcddVVLFy4EKfT/Lg/66yzePzxx4+5zqZiudsx6gkREWkBhgHu0ib4Kjvyc5twP9bt27dzww03MHr0aCZNmsTTTz+N2+0GoKSkhJkzZzJ69GjGjRvH1VdfTUZGBjt27ODaa68FIDk5mZUrVx7RawUEBDB9+nSioqJqezYAkpKS6NmzJx9//HG98//73/9yxhln1Dv2008/MXXqVOLi4gCIjY3l3nvvZejQoRQVFR31+9DcLNcT4qwZE2Lz4W2dGwiLiLRthgEvnQI7lh7TZWxAaGOe0HUM/PZTOMYdaMvKyrjyyis544wz+Nvf/kZ+fj633HILPp+P22+/nZdeeomSkhIWLVqE3W7ngQce4KmnnuIf//gHc+fO5YorrjjiAALmjrTvvPMOpaWljBkzpt5j5513Hu+99x4XXnghAHv37iUtLY3LL7+c//znP7XnnXHGGTz44IOsWLGC
UaNGMXToUDp37tyqej0aYrkQUjNFFwBfFRZ8C0REWkDr3or+UL755hvcbje33XYbNpuNjh078vvf/55bbrmF22+/neDgYDZs2MD777/PuHHjeOyxx7Dbj/zGwk8//URKSgo+n4+qqiocDgfjx4/nlVdeoW/fvvXOPfvss3nqqafYunUrPXv25N133+X0008nKCio3nmPPPIIo0eP5uOPP+aBBx6guLiYbt26cfPNN3P22WfXnvfhhx/yxRdfHNCmBQsW0KlTp0a+U8fOcp/Ajv1CiK+qEkdgyCHOFhGRRrPZzB6JqrJjuoxhGJSVleNyhWA7kt6NANcx94IA7Nq1i/z8fEaOHFmvLVVVVeTl5XHttdcSGBjI/PnzmTVrFl27duX222/n5JNPPqLrjxgxonZMyKpVq/j9739PXFwcw4cPp6ys/nsWExPDxIkTef/995k5cybvvfcezzzzDMXFxfXOs9vtnHPOOZxzzjkYhkFGRgYffPABd955J3FxcYwdOxaAM888s1X1jlg6hHg9VQT4sS0iIu2WzQaBjbqZciDDAI8NApsmXBypxMREunXrxqefflp7rKSkhLy8PGJiYti4cSMnnXQSV155JcXFxbz55pvMnDmTH3/8sdGvNWzYMP75z39y4YUXEhkZyTXXXHPAOeeddx4PP/wwxx9/PKGhoQwcOJClS+tudS1evJhbbrmFr7/+mqioKGw2G3369OH222/n+++/Z/369bUhpLWx3MBUR8B+PSEetx9bIiIirdGkSZMoLS3lhRdewO12U1RUxF133cXMmTOx2Wy888473HnnneTl5REWFkZYWBgul4vAwMDa2yS/7qk4lAEDBnD33Xfzz3/+k+XLlx/w+IQJE6iqquKRRx5h2rRpBzw+cuRIOnTowD333MPGjRupqqqipKSEBQsWsG3bNiZOnHjU70Vzs1wIcTrsVBkOwOwJERER2V9YWBjz5s1j6dKlnHjiiUyZMgW73c4//vEPAG677Ta6d+/OGWecwfDhw3n33XeZM2cOQUFB9O3blxEjRjB+/HgWLVp0xK95ySWXMGHCBB544AEKCwvrPeZ0Ojn77LPJzMzkzDPPPOC5wcHBvPnmm8TFxXHjjTeSkpLCxIkTWbBgAS+//DK9e/euPXfhwoUkJycf8OWPNUIAbIbROqeIeL1eVq1axbBhw3A4HE123SqvD8+sBEJsbgqvXU5k576Hf1Ib11zvZWtmtZqtVi9Yr+bWXG9FRUXtwMn9F+E6VuaYkDJcLteRjQlp49pSvYf6nTfm76rlekIcNhtVqCdERETE3ywXQux2G57qEOLzakyIiIiIv1guhAB4qicFeQ+ztK6IiIg0H4uGELMnxNDtGBEREb+xaAip7gnxKoSIiDSVVjrPQZpBU/2uLRlCvLbqnhCNCREROWYB1Xty/Xq1T2m/an7XNb/7o2W5FVOh7naMFisTETl2DoeDqKgosrOzAZpsiqlhGFRWVmK321v9lNWm0BbqrZlGnJ2dTVRU1DFPF7dkCPFWl23odoyISJNITEwEqA0iTaFmv5aAgIBW+6HclNpSvVFRUbW/82NhyRDisTnB0MBUEZGmUrPbbHx8PFVNNPPQ6/WyYcMG+vTp0+oWaGsObaXegICAJmtfo0NIXl4ef/zjH1m2bBkOh4Ozzz6bu+66C6ez/qWuueYafvrpp3rHysrKuPDCC5k1a9axtfoY1fSE+NQTIiLSpBwOR5N9QHm9XsBclrw1fyg3FavVC0cRQm699VYSEhJYvHgxubm53HjjjcybN++Anf9eeOGFej/Pnz+fZ599lt/97nfH1uIm4K3pCdHAVBEREb9p1OyYzMxMli1bxh133EFISAhdu3YlNTWVN95445DP27JlCw8//DBPPfUU8fHxx9TgplA3O0Y9ISIiIv7SqJ6Q9PR0oqKiSEhIqD3Wu3dvsrKyKCoqIiIiosHnPfTQQ5x77rmkpKQ0uoE13VNNqfZ2TJW7Wa7f2tTU
aIVaa1itZqvVC9ar2Wr1gvVqbi/1Nqb9jQohpaWlhISE1DtW83NZWVmDIWTFihWsXr2ap556qjEvVSstLe2onncoVYY56jgnew/Fq1Y1+fVbq+Z4L1s7q9VstXrBejVbrV6wXs1WqrdRIcTlclFeXl7vWM3PoaGhDT7nP//5D6eddhpxcXFH1cDBgwc3+QCdHz4NBB90iI4kadiwJr12a+T1eklLS2uW97K1slrNVqsXrFez1eoF69XcXuqtqeNINCqEJCUlUVBQQG5uLrGxsQBkZGSQmJhIeHj4Aed7PB6+/PJLnnvuuca8TD1NOdK6htdmrvBm83na9C+6sZrjvWztrFaz1eoF69VstXrBejVbqd5GDUzt0aMHI0aM4LHHHqOkpIQdO3YwZ84cpk2b1uD5GzdupLKykuHDhzdJY5uKz16dvTQ7RkRExG8avXfM7Nmz8Xg8TJ48mRkzZjB+/HhSU1MBSE5OZsGCBbXn7tixg8jISIKCgpquxU3AZ6sJIR7/NkRERMTCGr1OSGxsLLNnz27wsZUrV9b7+dRTT+XUU089upY1I291CDF8mqIrIiLiL5bcRbemJ8SmdUJERET8xpIhxKgZE+LT7RgRERF/sWQIqRsTop4QERERf7FmCLEHVH+jnhARERF/sWQIqbkdY9PAVBEREb+xZgixaUyIiIiIv1kzhKgnRERExO8sGkJqlm1XCBEREfEXi4YQsyfErtsxIiIifmPpEGJTCBEREfEbS4YQ7IEA2AzdjhEREfEXS4YQn0M9ISIiIv5myRCCxoSIiIj4nUVDSM3tGIUQERERf7FkCLE51BMiIiLib5YMITXrhNjVEyIiIuI3lgwhKISIiIj4nTVDSM3tGIUQERERv7FkCLE7zJ4Qh5ZtFxER8RtLhhCNCREREfE/S4YQm9OcoutAIURERMRfrBlCqseEOAyvn1siIiJiXZYMIdSMCdHtGBEREb+xZAixOTQmRERExN8sGUJqZsc48YJh+Lk1IiIi1mTJEFLTEwKAlm4XERHxC2uGkOrZMQB4tVaIiIiIP1gyhNjr9YQohIiIiPiDNUOIc78Qop4QERERv7BkCHE4nHgNm/mDQoiIiIhfWDOE2MCDuWCZbseIiIj4hzVDiMOGuyaEqCdERETEL6wZQux2PDjMHzRFV0RExC8sGUKcdltdCFFPiIiIiF9YMoQ4bDaqantCFEJERET8wZohxG7DY9T0hOh2jIiIiD9YNoRUaXaMiIiIX1k2hGhMiIiIiH9ZMoRoYKqIiIj/WTKEmLdjNDBVRETEnywbQjxarExERMSvLBtCqgwNTBUREfEnS4YQ5/63YzRFV0RExC8sGULqzY5RT4iIiIhfWD6EGBoTIiIi4heWDSFVCiEiIiJ+ZckQ4txvdozPoxAiIiLiD5YMIfVvx7j93BoRERFranQIycvLIzU1lZSUFEaPHs2jjz6Kx9PwDJNly5Yxffp0kpOTmTBhAs8///wxN7gpOOx2qqo3sPN5FEJERET8odEh5NZbb8XlcrF48WLmz5/PkiVLmDdv3gHnZWRkcN1113HJJZfw888/8/zzz/PSSy/x6aefNkW7j4nDRl1PiG7HiIiI+EWjQkhmZibLli3jjjvuICQkhK5du5Kamsobb7xxwLlvvvkmkydP5rzzzsNms9G/f3/+/e9/M2LEiCZr/NHafxddnwamioiI+IWzMSenp6cTFRVFQkJC7bHevXuTlZVFUVERERERtcfXrFnD8ccfz2233cb3339PTEwMV155JRdeeGGjGuj1eht1/pHw+Xx4qbkdU9ksr9Ga1NTX3uvcn9Vqtlq9YL2arVYvWK/m9lJvY9rfqBBSWlpKSEhIvWM1P5eVldULIYWFhbz66qs8/fTTPPnkk6xcuZLrr7+eyMhITj311CN+zbS0tMY08Yh5bGbpudl7yVu1qlleo7VprveyNbNazVarF6xXs9XqBevVbKV6GxVCXC4X5eXl9Y7V/BwaGlrv
eGBgIJMnT2bixIkAjBw5knPOOYdPPvmkUSFk8ODBOByOxjTzsLxeLz++b5YeExVO12HDmvT6rY3X6yUtLa1Z3svWymo1W61esF7NVqsXrFdze6m3po4j0agQkpSUREFBAbm5ucTGxgLmANTExETCw8Prndu7d2/c7vozT7xeL4ZhNOYlcTgczfLLqLkdY/N52/QvuzGa671szaxWs9XqBevVbLV6wXo1W6neRg1M7dGjByNGjOCxxx6jpKSEHTt2MGfOHKZNm3bAuRdddBFffvklH3zwAYZhsHz5chYuXMg555zTZI0/Fl6bVkwVERHxp0ZP0Z09ezYej4fJkyczY8YMxo8fT2pqKgDJycksWLAAgLFjxzJnzhxeffVVRowYwT333MNdd93F5MmTm7aCo+StHhOiECIiIuIfjbodAxAbG8vs2bMbfGzlypX1fp4wYQITJkw4upY1M191TwgKISIiIn5hyWXboW5MCL6GV3sVERGR5mXZEOKrvh2D9o4RERHxC8uGEG9tCNHtGBEREX+wbAipHRPiUwgRERHxB8uGEK8twPxGY0JERET8wrIhxKjuCbHpdoyIiIhfWDaEeG2aHSMiIuJPlg0hNbNjbBoTIiIi4heWDSFGbQhRT4iIiIg/WDaE1MyOUQgRERHxD+uGEHt1T4ih2zEiIiL+YN0QotkxIiIifmXZEEL1OiF2Q7djRERE/MGyIcSwa0yIiIiIP1k2hNTcjrFrTIiIiIhfWDiEmLdj1BMiIiLiH5YNIUb17BiH4QHD8HNrRERErMe6IaRm2XYAn9d/DREREbEoy4YQ7AF132vpdhERkRZn2RBSMzsGAK0VIiIi0uKsG0Lq3Y7R4FQREZGWZtkQYrM78Rk28wev27+NERERsSDLhhCHHaqo7g3R7RgREZEWZ90QYgNPTQjRwFQREZEWZ9kQYrfZ6kKIV2NCREREWpqFQwi4MRcsU0+IiIhIy7NsCHHYwVMTQjQmREREpMVZN4TYbHiMmjEhuh0jIiLS0iwcQjQ7RkRExJ8sG0Lsmh0jIiLiV9YNIXabxoSIiIj4kWVDSL3bMRoTIiIi0uIsHEL2XydEy7aLiIi0NMuGkHpjQnQ7RkREpMVZN4TYoUpTdEVERPzGsiHEvB2jgakiIiL+YtkQYrdBlZZtFxER8RvLhhAtViYiIuJf1g0h9v1mx2hMiIiISIuzbAixqydERETErywdQjyGxoSIiIj4i2VDSP3FynQ7RkREpKVZN4TY978doxVTRUREWpplQ4h20RUREfEvC4eQ/Rcr0+0YERGRlmbZEFJ/F131hIiIiLQ064YQO3gMTdEVERHxF+uGEJtNy7aLiIj4UaNDSF5eHqmpqaSkpDB69GgeffRRPJ6Gx1Rcc801DB48mOTk5Nqvb7/99pgb3RTqL1amMSEiIiItzdnYJ9x6660kJCSwePFicnNzufHGG5k3bx7XXHPNAeeuXbuWF198kVGjRjVJY5uSZseIiIj4V6N6QjIzM1m2bBl33HEHISEhdO3aldTUVN54440Dzt2xYweFhYUMHDiwyRrblBz2/W7HaEyIiIhIi2tUT0h6ejpRUVEkJCTUHuvduzdZWVkUFRURERFRezwtLY3Q0FBmzpxJWloasbGxXHnllUybNq1RDfR6vY06/0ivuX9PiOGtwtcMr9Na1LyHzfFetlZWq9lq9YL1arZavWC9mttLvY1pf6NCSGlpKSEhIfWO1fxcVlZWL4S43W6GDRvGzJkzSUpKYunSpdx8882EhoZy2mmnHfFrpqWlNaaJR8xhgwojEIDi/D2kr1rVLK/TmjTXe9maWa1mq9UL1qvZavWC9Wq2Ur2NCiEul4vy8vJ6x2p+Dg0NrXf83HPP5dxzz639+YQTTuDcc8/lk08+aVQIGTx4MA6HozHNPCyv18s3S1exw4gDILwqh2HDhjXpa7QmXq+XtLS0ZnkvWyur1Wy1esF6NVutXrBeze2l3po6jkSjQkhSUhIF
BQXk5uYSGxsLQEZGBomJiYSHh9c7d/78+Qf0erjdboKCghrzkjgcjmb5ZdjtsNXoCICtcCcOnxsCQg7zrLatud7L1sxqNVutXrBezVarF6xXs5XqbdTA1B49ejBixAgee+wxSkpK2LFjB3PmzGlwnEdJSQkPP/ww69evx+fz8c033/Dhhx9y4YUXNlnjj4XdZiOfcAqM6h6cvAz/NkhERMRiGr1OyOzZs/F4PEyePJkZM2Ywfvx4UlNTAUhOTmbBggUA/OY3v+Gyyy7jd7/7HcnJyTz11FM88cQTpKSkNG0FR8lhA7DV9oaQt9mfzREREbGcRq8TEhsby+zZsxt8bOXKlbXf22w2UlNTawNKa2OGENhidCSZzZCX7t8GiYiIWIxll223280UssVX0xOi2zEiIiItybohZL+eEABy1RMiIiLSkiwbQmpux9SNCUkHw/Bfg0RERCzGsiHEbrNhs8FWI9E8UFEIZXn+bZSIiIiFWDaEADjtNioJxBvexTygGTIiIiItxtIhxG4z78lURfc2D2hciIiISIuxdAhxVo9OrYrsZR5QT4iIiEiLsXQIcVSHkMoohRAREZGWZukQUtMTUhnR0zygECIiItJiLB1CahYsq6i5HZO/BXxeP7ZIRETEOiwdQmp6QspDOoIjCLxuKNju51aJiIhYg6VDSM2YEC926FA9Q0a3ZERERFqEQgjg8RkKISIiIi1MIQTwGQZ0SDIPaq0QERGRFmHpEOK0m+V7vAZ06GMeVE+IiIhIi7B0CKkdE7L/7Zj8LX5skYiIiHVYO4RUL9vuNQyIqQ4hhTuhqsKPrRIREbEGa4eQ2p4QH4TGQlAEYMC+rf5tmIiIiAVYO4Q4qmfHeA2w2SCmZvn2DD+2SkRExBosHUKc+48Jgf3GhSiEiIiINDdLhxD7/mNCoG5ciHpCREREmp2lQ8jBe0I0Q0ZERKS5WTqE1K6Y6q0JITVrhagnREREpLlZOoQc0BNSMzC1OAvcZX5qlYiIiDVYOoTY7b8aE+KKgZBo83vdkhEREWlWlg4hzv03sKsRoxkyIiIiLcHSIaR2sTKvr+5gB82QERERaQmWDiHqCREREfEfS4eQmjEhPmO/EFLbE6IxISIiIs3J0iGk4Z6Q6hky6gkRERFpVpYOIXVjQhroCSnZC5XFfmiViIiINVg7hPx62XaA4EhwxZrfa5quiIhIs7F2CHH8arGyGpohIyIi0uwsHUIaHBMCmiEjIiLSAiwdQhx2s/wDe0KqB6dqhoyIiEizsXYIMTtCDgwhNT0heZtbtkEiIiIWYu0QcrCekLh+5p+5G8H41WMiIiLSJCwdQurGhPjqP9ChD9jsUFFoTtUVERGRJmfpEFK7Tsive0KcQRDdw/w+Z2PLNkpERMQiFEJoIIQAxPU3/8zd1IItEhERsQ6FEBqYogsQ29f8M2dDC7ZIRETEOiwdQpxH0hOi2zEiIiLNwtIh5JA9IXE1PSEKISIiIs1BIQTwHep2TGk2lO9rwVaJiIhYg0IIB+kJCQqHiC7m9zkanCoiItLULB1CasaE7Mgvo6LKe+AJcRqcKiIi0lwsHUJG9YwhOMDOhj3FXPPKCsrcnvonxNasnKqeEBERkaZm6RDSLcbFK1eNIjTQwXebc7nypeWUVO4XRGqWb9fgVBERkSZn6RACMLpXB169ejThQU6Wbcvn4YXr6x5UCBEREWk2jQ4heXl5pKamkpKSwujRo3n00UfxeDyHfM6mTZsYOnQoS5cuPeqGNqcR3aP5y/ShACzfll/3QM1aIYXbwV3qh5aJiIi0X40OIbfeeisul4vFixczf/58lixZwrx58w56fnl5ObfffjsVFRXH0s5mN6xrFADb88uo8lZvaOeKAVes+X1uun8aJiIi0k41KoRkZmaybNky7rjjDkJCQujatSupqam88cYbB33OQw89xJQpU465oc0tISKI0EAHHp9BZl5Z3QO6JSMiItIsnI05OT09naioKBISEmqP
9e7dm6ysLIqKioiIiKh3/vvvv09mZiaPPvooc+bMOaoGer0NTJ09RjXX/PW1e8aGsjariPS9RfTsEAKALbYv9szv8WVvwGiGtrSEg9XbnlmtZqvVC9ar2Wr1gvVqbi/1Nqb9jQohpaWlhISE1DtW83NZWVm9EJKRkcHTTz/NW2+9hcPhaMzL1JOWlnbUz23staOdbgC+X5NOvHs3AHEVoXQDCjOWsiVmVbO1pSU053vZWlmtZqvVC9ar2Wr1gvVqtlK9jQohLpeL8vLyesdqfg4NDa09VllZycyZM7n33nvp1KnTMTVw8ODBxxRiGuL1eklLSzvg2iPyN7N4+2bKAyIZNmyweTCyENY9S5R7N8OGDWvSdrSUg9XbnlmtZqvVC9ar2Wr1gvVqbi/11tRxJBoVQpKSkigoKCA3N5fYWHPAZkZGBomJiYSHh9eel5aWxrZt27jvvvu47777ao/fcMMNnHPOOfzpT3864td0OBzN9sv49bWTEsyenK25pXXHOw4BwJa/FYenzFzOvY1qzveytbJazVarF6xXs9XqBevVbKV6GxVCevTowYgRI3jssceYNWsW+/btY86cOUybNq3eeSkpKaxZs6besX79+vHPf/6T0aNHH3urm0mvOLM3JyOnFMMwsNlsEBoL4Z2gOAv2rodurbf9IiIibUmjp+jOnj0bj8fD5MmTmTFjBuPHjyc1NRWA5ORkFixY0OSNbCk9Y0Ox2aCwvIr8UnfdA4nHmX/uWdPwE0VERKTRGtUTAhAbG8vs2bMbfGzlypUHfd7Gja1/imtwgIPOUSHs3FdORk4pHcKCzAcSB0P6/2CPdQYLiYiINDfLL9v+a73jwgDIyCmpO5hYPUhVIURERKTJKIT8Ss24kC31Qog5OJXs9eA99BL1IiIicmQUQn6lridkv71iontCQCh4KiA/w08tExERaV8UQn6lJoTU6wmx2yFhkPm9bsmIiIg0CYWQX+ldfTtme34ZlZ79lp6tHReiGTIiIiJNQSHkV+LCgwgPcuIzYPv+G9lpcKqIiEiTUgj5FZvNtt+iZQ0MTt29BgzDDy0TERFpXxRCGtDg4NT4AWCzQ1kulOz1U8tERETaD4WQBjTYExLogg59zO91S0ZEROSYKYQ0oE+8uUld2s7C+g9oXIiIiEiTUQhpwNheHXDYbaRnl7AjX4NTRUREmoNCSAMiXQGkdI8G4Mtf9hv/URNCdq/2Q6tERETaF4WQg5g8IB6ALzdk1x3smGz+mZ8B5QUt3ygREZF2RCHkICYPSABg6ZZ8Siqr94sJ7QBR3c3vsw6+Y7CIiIgcnkLIQfSKDaVHBxdur4/v0nPqHug8wvxz10/+aZiIiEg7oRByEDabrbY35Mtf9rsl03m4+ad6QkRERI6JQsghTO5vjgv5emM2Pl/1Kqm1PSE/+6lVIiIi7YNCyCGk9IghPMhJbomb1TsLzIMdh5orpxZnQdFuv7ZPRESkLVMIOYRAp50T+8UB8FXNLJnAUIjrb36fpd4QERGRo6UQchg1t2Q+StuNUbNxXc24EA1OFREROWoKIYcxdWACrkAHW3JKWbo13zzYqSaEqCdERETkaCmEHEZ4cADnDOsMwBtLt5sHa2fI/Aw1vSMiIiLSKAohR+DS0d0A+HTtbnJLKiF+EDiCoKIQ8rf4uXUiIiJtk0LIETiucyRDu0ZR5TV4Z8VOcAbW7SOjWzIiIiJHRSHkCNX0hry5LNNcM2T/WzIiIiLSaAohR+isIZ0ID3ayI7+cxZtz6xYt27nCvw0TERFpoxRCjlBIoIMLhncB4PUfM6HrKPOBrJ+hstiPLRMREWmbFEIa4bIx5i2ZL37ZyxZvPET3AJ8Htn3v34aJiIi0QQohjdAnPpzJ/eMxDHjhu63Qa5L5QMZX/m2YiIhIG6QQ0kjXT+gNwPyfdlLYebx5cMvXfmyRiIhI26QQ0kgje0ST3C0Kt8fHq7u7m5vZ5W6Cwp3+bpqIiEibohDSSDabjetP
NHtD5i7Pw9uxeqpuhnpDREREGkMh5ChMHZhAr9hQiio8rAmqCSEaFyIiItIYCiFHwWG3ce2JvQCYs92cMcOWb8Dn81+jRERE2hiFkKN0/vDOdO/g4uvSblQ6XFCeD3tW+7tZIiIibYZCyFEKcjq49/QBeHCyuGqAeVDjQkRERI6YQsgxOHlgAuP6dOBb73HmAY0LEREROWIKIcfAZrPxwJmDWOwbCoCR+QOU5vm5VSIiIm2DQsgx6pcYzvgxo1nn647N8OL75UN/N0lERKRNUAhpAjOn9OUL+zgA9i55y8+tERERaRsUQppAdGggXcZdBEBc3lKKcvf4uUUiIiKtn0JIEzn7pPGk23vhxMeiBS/7uzkiIiKtnkJIEwlw2HEOOR+A6G0fsjm7xM8tEhERad0UQppQzxMvBWCsbR1/ff97DMPwc4tERERaL4WQphTTi8q4wThsBpGZn/HOT9pZV0RE5GAUQppY0NALADjLvoSHF65nd2G5n1skIiLSOimENLVB52PY7BzvWE83dzp3/zdNt2VEREQaoBDS1KK7Yxs8HYCZAe+yaFMO76zQbRkREZFfa3QIycvLIzU1lZSUFEaPHs2jjz6Kx+M54Dyfz8ff//53JkyYQHJyMmeddRYff/xxkzS61TvxDrDZmWL/iUG2rTywYC0rtuX7u1UiIiKtSqNDyK233orL5WLx4sXMnz+fJUuWMG/evAPOe+ONN3j//fd57bXXWLlyJbfddhu3334727dvb4p2t26xSVDdG/JI1IdUVPm4at5y1mcV+blhIiIirUejQkhmZibLli3jjjvuICQkhK5du5Kamsobb7xxwLmXXnopCxcupFu3brjdbvLz8wkJCSE4OLjJGt+qVfeGJJcvYXrnPIorPFzx0jK25Zb6u2UiIiKtgrMxJ6enpxMVFUVCQkLtsd69e5OVlUVRURERERG1x+12Oy6Xi++++45rr70WwzC45557iI+Pb1QDvV5vo85vzDWb49q1onthG3QB9rXv8GjUh6z1/o5f9hRz2YtL+c+1o0mMbLkw1iL1tjJWq9lq9YL1arZavWC9mttLvY1pv81oxNSNDz74gKeffppvvvmm9tj27duZOnUqixYtIjEx8YDnuN1u7HY7y5cvJzU1lUcffZTTTz/9sK/l9XpZtWrVkTatVQoq2c6gr3+LDR8rUp7md6s6safES5cIJ49MjCE8SOOCRUSkfRo2bBgOh+OQ5zSqJ8TlclFeXn/di5qfQ0NDG3xOYGAgAGPHjuWcc85h4cKFRxRCagwePPiwRTSW1+slLS2tWa5d3zCMgsuwrXyVEbvf4D/Xv8eFc5exs6iSp3+u5NXfjiQsqFG/gqPScvW2Hlar2Wr1gvVqtlq9YL2a20u9NXUciUZ9AiYlJVFQUEBubi6xsbEAZGRkkJiYSHh4eL1zH3/8cQDuvvvu2mNut5uoqKjGvCQOh6PZfhnNee1aJ90Ha/+LbdcKumd/xWtXT2b680tYvbOQG15fyQu/SSG0BYIItFC9rYzVarZavWC9mq1WL1ivZivV26j7AT169GDEiBE89thjlJSUsGPHDubMmcO0adMOODclJYV///vfLF++HJ/Px1dffcXHH3/M9OnTm6zxbUJ4Ihx/s/n9F38iqUMQr1w1itBAB0u25HHZi0spLKvybxtFRET8oNGDEmbPno3H42Hy5MnMmDGD8ePHk5qaCkBycjILFiwAYMqUKdx///3cf//9jBw5kueee46///3vDB8+vGkraAuOvxlC42HfVljxEkO7RvH6NaOJDAlg5fYCLvzXErKLK/zdShERkRbV6PsAsbGxzJ49u8HHVq5cWe/nadOmNdhLYjlBYTDpHvhwJnzzZzjufJK7xfP29WO57MWlbNhTzHnP/cBT04cytncHf7dWRESkRWh6RktJvgISB0NFAXxyFwD9EsOZf8NYusW42FVQzsVzf+RPC9ZR5j5wBVoREZH2RiGkpTiccPbfwWaHde/C
xk8A6N4hlI9uOYGLR3UDYN4P2zj56W9ZuDpLG9+JiEi7phDSkjolw9jfmd9/eBtUFAIQHhzAn88fzKu/HUWnyGB27ivn5rdWct6cH7TnjIiItFsKIS1t4j0Q3ROKs+DzB+s9dGLfOL64fQIzp/TFFehg1Y4Cpv1zCTe+/hOZeVruXURE2heFkJYW6IKzqwf2/vQyrH233sOuQCe/n5LEN3+YyMWjumK3wSdr9zDlr4u4/e3VvPZjJj9l7qPc3baX9RUREWmZVbKkvp4nwvG3wA+z4YPfQfwA82s/8RHB/Pn8Ifzm+B78+eMNLNqUw39/3sl/f94JQEiAg5MHJXBucmfG94nF6VCeFBGRtkUhxF8mPwi7V8HWb+E/l8G1X0Fw5AGn9U+M4JXfjmJJRh6L03NYv7uItbuKyC2p5INVWXywKouIYCcje8QwsmcMQ7tE0a2Di8SIYBx2G4ZhUFHlpcrr30GuX2/M5s8f/8Jj5w0mpUdMvcdyiiuJCw/yU8tERMRfFEL8xeGEaS/D8xMgbzO8nwoXvg42W4Onj+3doXYNEcMwWL2zkPdX7mLh6izySt18uSGbLzdk157vtNsIDXJSWunB4zOw22Dgjz8wons0o3p2YFL/OFyBLffrf+aLdDbtLeGBD9bx0S0nYKuu84XFW3jko1+4/4wBXDO+V4u1R0RE/E8hxJ9CY+HCV+GlU2HDh7D8BRh17WGfZrPZGNY1imFdo7j/jAGs313Esq35LNuaz6a9xewqKKfKa1BYXrccvM+AtVlFrM0q4pUlmYQEODhpQDxnDenI+KS4Zt2/ZmtuKat3FACwfncRn63by6nHJbKnsIL/+98mAGZ/mc70lK5EhgQ0WztERKR1UQjxt84jYOos+PRu+Ow+6H48JAw64qc7HXaGdIliSJeo2p4Er88gu7iCkgoP4cEBhATYWPLTairDOrFyRyFfb8wmM6+Mj9bs5qM1uwl02hnXuwNTBiYwZUACCRHBTVriB6t2mW212/D4DJ75YhMnD0zgyU83UF5lDrAtqvDw8vdbuXVK3yZ9bRERab0UQlqD0TdAxteQ/hnM/y1c+7U5i+YoOew2OkaGQPUQE6/XS5zLwbAhHTknuQsPGgNJ21XIh2t28+naPWzPL+PrjTl8vTGH+95by5AukUwZYAaSAR3Dsdls7Cms4INVu9iSU8rvTupD15i69q3dVch7K3dhGBDotJMYEcSFI7sREujAMAw+WJUFwL2nD+Cvn29iw55invhsA++uNMPJ9Sf24vlvt/Did1u5alxP9YaIiFiEQkhrYLPBuXPgH8dDzgb47F4465lmfDlbbe/JPaf1Z3N2CZ//spcv1u9l5Y4C1uwsZM3OQv76+SY6R4XQOSqE5Zn51Czg+tn6Pfz94mRO6BPLvB+28djHvxww8HXp1nzmXDqcNTsL2ZpbSnCAnRkju7KvzM3fv9rM84u2AHDB8C7cdWp/vtmYw8a9xbz43VZum6reEBERK1AIaS1CY+G8f8Jr55nrhyQMOqLxIcfKZrORlBBOUkI4qRP7kFNcydcbsvn8l70sTs9hV0E5uwrKARjVI4ZSt4d1WUX85qVlDOsaxc/bCwA4qX88/RLDKXd7eWNpJp+s3cOby7aTkW0usjZ1YCJhQU6uOaEX877fRnGlB1eggztP7YfdbuP3U5JIfeNnXv5uK6N6xFDm9uAzYELfOEICHc3+PoiISMtTCGlNep8EJ/0RvnoYPrkTIrtAv9NatAlx4UHMGNmVGSO7UlHl5bv0XHYXljOxXzxdY1xUVHn54/treeennfy8vYAAh437Th/Ab47vUTvjpUt0CI989AuzFq6vDRDnDusEQKQrgN+d1Ic/f7KB20/uVzv+5NRBifRPDGfDnmIue3FpbXuGdonkjWvHENaMA2dFRMQ/9H/21mb87VCQCT+/ao4Puepjc88ZPwgOcDBlYMIBx56cNoQR3aP53/q93HxSH5K7Rdc757fjevL95ly+3phD
pcdHtCuAE/vG1T5+3Ym9OG94Z+LD6wbA2u02/nT2IO6cvwan3UZESABbckpYvbOQ615dwUtXjiQ4QD0iIiLtiUJIa2OzwRl/hcKdkPEVvHkhXL8YwhMO/9wWYrPZuGhUNy6q3vn31+x2G09NH8ppf1tMdnElpw/uSMB+K7rabLZ6AaTGmF4d+PbOSbU/r95RwCVzf+SHjDxufmslpwxK5KfMfWTklDCpXzxXn9CTQOehV4otqfTwYdpOkrtGM7BTRL3HPF4fDruttgdHRERalkJIa+QIgOmvwIsnQ84v8N+r4YoPwN52egI6hAXx0pUjeW1JJrdMTjqqawztGsXc36Rw5cvL+Xz9Xj5fv7f2sWVb83lnxQ7uPX0AxZVVfLRmN0u35NO/YziXj+3BlH6xfLW1jBs+WUxOSSV2m9lDM3NqX0oqPTy/aAtvLdvOcZ0jeHLaUHrGhtZeu7CsCleQo15wEhGRpqcQ0loFR8CMV+FfE2HbYvjmcTjpPn+3qlGO6xzJE9OGHNM1ju8dyz8uHc4f319Lp6gQRnSPJi48iH8u2sKW3FKueXVFvfOXb9vH8m37CHTacXt8AMSEBpJf6uaF77ayYHUWBeVVtY8t37aP0/+2mHtP709MaBBvr9jB4vQcOkWF8Nh5g2tvI63cvo9/Lsqge4dQbpvat/bWUJnbw3NfbybY6eA343oQEazpxSIiR0ohpDWL62tO1X33Wvj2L9BtDPSZ7O9WHZ2dK+DrR+HUxyGuX93xjK/g/ZvMnYWTpjb41MkDEpg8oP7tqBkju/K3L9J5c+l2OkeHcPrgjkzoG8u3m3J5c9l2coorcTlt3DKlL1ed0JMfMvK4/721tTN9UrpHc9W4nryxNJMfMvL44wfr6jd3XzlXvLSM85M7U1zpqdcLsyQjjzmXDqeoooqb31rJlhxzBtCL32/lxgm9uXxs9xZdEl9EpK3S/ylbuyEzIPN7+GmeeVvm0v9ClxH+blXjffNnM3B8+xRcMLfu+PezoTgLvnv6oCGkIRHBAfzxzIHcf8aAemM6RnSP4aZJfVizYx/Fu7dw4uieOBwOJvWL5/PbTmT+TztJig9nTK8YbDYbpx2XyCtLtvHEpxuICglk2ogunD64I2+v2MErS7bVLqhmt8EZQzqxOD2HtF2FnDF7MRVVPtxeHwkRQYQFOcnIKeXPn2zgz59sID48iM7RIfRPjOCk/vGM69MBV6CTiiovWQXlhAY5D1iZNqugnPIqL71iQzVORUQsQSGkLTj1cdiTBrt+gnlnwPR50O/UI3++pxJnRR61q421NHcpbF1sfr/pU/C4wRkI5fvMW00AmT9A8R4IT2zUpRv6sA502knuFsWq/PpjOlyBTq4Y26PeMbvdxlXjenLxqG4EOuzY7eb1/nT2IM4e1onHP95AXEQQM6ck0Sc+nF0F5aS+8XPtXjhTBsTz5LShRAQ7eW/lLmZ/lc6O/HKyiyvJLq5k5fYC3lq2nUCnnWhXAHuLKmtfe3DnSE4emECg087HabtZvbMQgO4dXJw8MIHje8cS6QogIjgAh91GYXkVheVVRLsCGNIlqlHvk4hIa6QQ0hYEhJgDU9/+DWR8Cf++GE59AkZefejBqiU5sHwu9mVzGVqej7E4ChIHQ5cUGH4FxLTQrrVbFoG3+sO3sgi2fgtJU2DT/8DnqT7JgPULYPR1LdOmX2lo+u/wbtG8fcPYesc6R4Xw9vVjeOm7bcSFB3HB8M61QWh6SlemjejCvrIqdu4rY0d+Ocu25vHlhmx27iuvDSCuQAflVV7SdhWStquw9tp2GzjtdjLzypi7eCtzF289aHsn9Yvjj2cOpFdcWFOULyLiFwohbUVQOFzyH1h4K6x6HT65w9x1d8KdMOi8ujDi85m3b1a/BWv/C54KavoKbBUFZs/DtsXw3TPQ91QYcwP0nGBODW4Khbtgx1IYcDY4qv96bfq0ugEOMLzwywdmCNmw0Dwe3hGKd8O69/wWQhojyOng
xom9G3zMZrMRExpITGggQ7pEccaQjvzpbIOMnFKKK6roFuMiJjSQvFI3X1TP+KnyGUwdmMCpgxJxBTpYtCmHz9btYeOeYoorPBRXVOH1GUS5AgkPdpKRU8LXG3P4bvO3nDusM4mRwQQ5bWTvKWVJYQZenw2nw0bnqBC6RIfQvUMoceFBLfwuiYgcnkJIW+IIgHOehfgB8O2TkLvRHCfy8R0Q0clc+j0vAwp31D2n03C8Y3/H6rJEhnZ24chZZ37Yb/4CNn1ifnUba67U2mPc0bfN5zOXm//8AXCXmIuuTX7AvAWU/j/znLE3wQ+zYcNHcMqfYfOX5vEz/g/+fQlsXwJFuyGiI6x7HxbcDFP+ZPb4tGE2m40+8fV7LGLDgg661srpgzty+uCOB73elpwSHvnoF77akM07P+2s/+Ca4gaf0yc+jBP6xDKmVwd6xYXSNdql5fBFxO8UQtoamw2O/x0MvxyW/guWPAvl+eZXjaAIGHQuDLsUuo4Gnw9j1SroOAS6JEPyZZCbDkv/CT+/Zn74zzsdek2EEVeZS8U7f/UvZ8OAH/9hhpZTHzf3tqmxLxM+uKlufAeY546+0Rx0WrwbAlww4S5Y+RqU5cHXj0FVGUR2g36nQ5dRsHMZ/LIAek82r+cugc/uM2cERfdovve0jekVF8ZLV45kcXoO323OpcLtpcztYW9OHglxHQhwOqis8pFVUM7OgjJ27itnc3YJm7NLmPfDttrrJEYEk9IjmjG9OjC6Zwy94sJw2DUgVkRajkJIWxUcCRPuMHsX8tKhNNf8Cgw1P7QDQg79/Ngkswdi/O3m9N+fX4Ut35hfwVEweDqMvt48z1sFH840AwTAq+fAlR+bU4j3pMHrF0DJXjNoTH4Q0t42B9F+91cIqV7SvdckCAozA8eqN2DpP8zj/c8wg9Wg88wQsuZt83XcJYANPOVmT88lbzfdLaNDMQzY/qO5Qm1LjZk5SuOT4hifZK5j4vV6WbVqFcOGDcbhqN/DUVDmZklGHos357JqewE78ssorvSwp6iCD9fs5sM1uwFzrEr/xHCGdo3ihgm9D5i9IyLS1BRC2rpAF3QcevTPj+gEZz4N435vTgNe/R+z92L5XHPMSb/Tqme3LAKbHSK7mnvbvHq2eavk4zvMwaYJx8GFr0NMTzOcvHYeLH8Rorqar9P3FPPPAWebIcTwVf98pvnnwHPgs3tgV/XiY64OMO1lM+Ck/w/Wf2D27jRWSTb88IzZvqEX141TKcmGte+aYaPfGeZsnZJsM2xt+NCsdeC5ZkiLHwgle6Bgh7mIXGzfujE47lLIWgV2JyQeZ4ZAw4D8LbDtO3MGUGAoBIZBwsBj+10dpShXIKcN7shp1bd4DMOgsLyKDXuKWbolnx+35LFyxz7K3F5+3l7Az9sLeH/lLp64YAgnD2rcbCURkcZQCBFTdA8zVJz0R9jyNSx7wbz1svFj8/GAUJj2EnQZaU4TzvkF3rvefKzb8XDxWxASZf7caxJ0H2cOkM3bbB5LOrn6sYnmB7K7BEJioOsY83hkZ/PW0Y7qHXTPex56TYDxt8GiJ+CTu8znlew1b+d0G2O25RC9I66CjdhfvAyKsswD3z8D4/8Au1ebgctTXn1iLAw82xwrU76vbgDtunfNL0cgeN11Fw4Mg47DzPC1d515LpjBpUOSebx4d8ON6n0STLwHEoeYtW79FjwV5iaFnUdAaJw5pqdguxlsuo01gyaYU5szvzfP73vqUfcM2Ww2olyBjOnVgTG9OvB7kvD6DLbmlrIuq5B/fbuFdVlFXPfaT1wwvAvdYlzm4FjDYGK/eMb17oCzekn7vJJKNuwpJjEymO4xrtrjIiJHQiFE6rM7oM8U8ys3HZY8Bzkb4dQ/Q6dh5jlXfAAvnwb5GWYvwrQX69/+sdlg0n3mOBMw//UfUT3QMiDYDCTr3jVvzTj2+ys48hrzg/nEO+sWLjvhNkibb77WGxfUb2uHJBhyoRkmdq+B3E0Q
1Q26jsYWFE6/7x/F5nNDdE8zGORthvdvqHt+x6Fm70fxbljxknksYTCcO8esYfH/mQNkvW4zmER2htI8M0Blfld3nfBOgGFeJ3ejecwRaIakyK5QVQoVhbDte3PBtoyvDgw2B+MIMgcMB0eaA3kri8zjQy+GM58x38+DKd9nLgaXsxFOfxIiuxz8Zezm4Nk+8WGcelwi//e/Tfzr2y389+f6A19f/t6cmnxiUhy/7C5i/e6i2scCHDZ6x4Vx7+kD6u2aLCJyMAohcnCxSeay8b8WngDXfmWO++g5oX6QqNFjnPmv/oyvzKCyv6kPmTN5Trit/vEhM8yAUtOjAuaH7Llz4N3rzDEnER3NPzO+MsfCfP1I/WsUZMK2xdT8e9xIOgXbBXPNXoof/wE/zoG4AXDiH8z2+byQ/pk5FqXjEDj+FnMWEpiLwp26xwwL4Z3MOn1e80M9a6UZvLqOqvtwL95rjpEJCDZ7NX49Lid/qxlsVr9lXjMs0eztCY4038s9aebx4CgzTJXvM3tFMr6qu0ZoHJTlm9fITYfTnoQtX2NPe4dh+7Zj/6X6fa8qMwNIZfU6JPu2wlWf1H9vDyLI6eDe0wcwsW8c767cRYDDTkSwk+JKD5+k7SanuLJeOOnewUVOcSVlbi8b9hRzzSsreO7S4Uwd2Hp2fhaR1slmGP5aRvPQ6gbaDTtgoF1rvnZr5Ld6y/LNHo9hlx5+oGxjVRabt082fmoGmsTBENff7DHZvhQjZwNZ4UNJnP4XHM79NpUzjJYZ4HooJdlm+2N61W+Lp9L8Co4wfzYMs3cn/XOz96X3ZDPcbPvWXLiuouDwrxU/yLx9VbIHep5oLvtfkGnu47P9R3PRutE3gCvGnGa9c5l5iylxiNlT5Aysdzm3x8eiTTn8lLmPgZ0iOL53B2LDgvD5DHYXVfDYR7/wUdpunHYbf784mdMGd8Tt8VHh8Tb55n7677j9s1rN7aXextShnhBpPq4Y8xZLcwgKNz9Ah19R/3jP8TDiSnxeL3tWrSLR9qsxCv4OIABh8ebXrzmD6k+NttnMzf723/APzHE1130Nb10CORug53h8x01jY0Eg/YKysW/52gwto66D4y6AvWvh5dPN8SdzJ0H2L3XjWBY9AUvmmLe/Mn8ww0pte0LMIBLoqh0nE1iay9SSbKZ6KyHwahj4BwDs3ko6r5rNszlvcE9kAOvKosj6Tyxz5oeR7wmijGD6d4rkvGFdCHcFmVO8EwY33ItWuBNWvWn2CHUZYZ73qzAkIu2DQohIWxTTC25YXD3ANxrD66Vs1SqMYTPghN/XP7fjUJjxCrwxwwwkAH1Pg/6nw9LnzWPr3jWPB0WaY3/2pJlrz+z48eBtWPyUuSrv6OvNNWf2bcMGdAG67P+Pn5oOkBzg87rDXmco+TFDCel3EmEDT4EOveGHZ83NDGsGDYM5LmbMDeb070NtUyAibY5CiEhb5QioW4flcPpMMYPI+g8g5WroXr0nzrDLzFlQO5ebM5p6TjB7HXw+81bQ3rXm/j41U6pD48xenPwt8Om95liTT+82HwvvaAYFVwwUbKdg9xZs7mJCjHIqSotYn1VAcYWHECoZYt9KhKeUuOwfIPsHWPwIht2JrWYvoS6jzNtSu36qHmD7N3MszgUvmuvNiEi7oBAiYhUDzjK/9me3mwvG9T/jwOPx/c2vhnQcagabrx6FtHdg6EUw8W7zNlm1qP1ODwRGeH3M+TqDFxZvIcDuY2ToXoZ41tGvdDlj7esJ9VWSZcTyN/sVfJdzAmcM7cTvp/UhdPMCeD/V3IPoper9jrxubJ4qgitigWHH/t6IiF8ohIjI0QkKh9MeN7+OQIDDzu+nJHHL5D61Ow8bhsHybfuY+c0vbNuURqaRQCWBUF7Bv77dwoers5h1zglMufIjeOsi2JtmLukP2IGB2DEKvoST7jcX3hORNkUhRERalG2/wcE2m41RPWMY1XMcuSUpFFd4qPL6ajfp27mvnGte
XcGQLpFMP+4lzt33MuG+InAEYVQWY9v2LbZVb5ir3w44y5wy3SWl4UGvhTvBHmBOMReRVkEhRERahdiwIGLDzNlBfRPCmdA3nr99mc4Li7ewZmcha3bCH5nG0C6RXDamO6cfl8COxf+mX+Zr2HYuM/csSnvbvFhUN3PNl+TLYN82c0ryLwvNxxKHmLOBXLHmbKCSbIjpDSN+UzdrKXOJuTlkaJy5YeOhFoWrUZJj/hl2hAu1VRSZA4ubqwfHXQp715vTuu37zRIzDCjNaXiGlkgLUwgRkVYpJNDB3af15+oTevL5+r18um4PP2zOZfXOQlbPX8OjHwXQK7ITfTr9hZGD1nGcZy3dytbjyv4ZW8F2+PgPeL98BHtlITYMwAYYsGeN+fVr3z5pTmku3Fl/R+j8DLjoTfP2U85G+HKWuTpux2HQebg5cHb9AnONlZo9h8bdYj5esN3cJsBTaS7N36G3GXp+fM7cW6mq3NxdevztdT03ORvNfYp6n1Q/PPyaYZgL1m382AwVI6+u23RxXya8Md1cwbf/mXD+v8w9jMryzYX/Nn9uTvWe/IAZUgzDnO6dvd6cOVWzVUBb5fOZC/oZPnO8kyumcc83DHNhwoamkEuT0mJlbXhBmCNltXrBejVbpd7ckkreXrGDN37czq6C8gbPiXBWMc32NVfbF9LZlgfAhuhJRJ/5IAkJXSDjS3MV2ppVa10dzFVzdy6vu4g9wNwwceMnZm9Fp+HmYm9LngNf1ZE1Niiibpn9GsFRZvDwVtY/3nWMOdV55Wt1K+R2G2tuLhk/APIy8C19nvJN3+AKDcPmCDJ7cfK31F3DGQKT/2juwfTWxVCaXfdY4mBzX6iP7zAXq9tf9xPMoFWz31GXUXDpO0e0um6tot3mh35AiLlNQkxPCE8034OG1uapKDSnd9sDzA0ho3uYIWjrt+YmlvGD4Pib8YbG1/299lWZ6+jUW+DPDStfNfehGnC2GRoqi+H9G+t6vuxOM9ANnm7esjvUwoneKnNfqW/+bP6ehl0KY240w+PR8PnMGWZbF5k7lFcUmoO4h15shsJfv3w7+e+4MXUohLThX/SRslq9YL2aLVevz+DHjBy+W7WRoKh4skvcbNpTzC+7iyh1mwuxBeDhxIBf2OmJZKPRDYfdxjnDOnH3qf2Jj2jg9sqO5bDqdXODwtE3mDtA7/oJXp9mrplSo++pcNw02LMadq00P/j6nWHuCF2WZy6Xv/a/5oJw9gAzRDiDzP2NasJHl5HmZooVhfDR7eAurru+zW6ujeIpNz9Au4yC7UuABv5X7Qg0w1FVRf39jMDcOXrCnfDhbVCWW3c8qjuc/hdzxeHV/667rjPEXIfFXWLOfrrsPQjtYLZx18/mba2CTLPXpXNKdQ9DB3O37S8frl/D/u0LjTe3cRg8A3qcYO6i/fVj9dvUEEcQvuTLyS4oIaF0A7bdq82wctL9MOj8uk0096SZ50f3NIPcT6+YjzkCoUMfs3enRnCkud9UwiBzG4X8LeYU9NA4c+Xl9R/UbbpZ9wsx97ka+VvodZIZgjZ/CcueN9+bEVeaAccRYIaYzO/NMLVzhbm9w6+DKJhhdMRvzJ63xCG1warB/44rS8x1eHYsMzcZTRh46PetFVAI8eO1WyOr1QvWq9lq9ULDNft8Bjv3leNw2OgQGkigw87XG7N58but/JBh9oqEBTm5dUoSF4/qRoDDToDDVm+w7AFyNsGbM8xQcerjB05nbkhx9ViTuH51q+B63Oa/im1280O+5jXzt8J7N5jL5Q+/3PwgtTnMXotNn9Re0kg6ma0RY+jeux8OwwPOYPPDPSjcvH3w8yvw2f1mGOg92dz7KDjCvCX05kWQvc7cm+n8f9WtL5P9i9nzEj/Q7HnJS4dXzzUDQkxv80N796q6dWLqsZlrwxRX71LdcZgZ3PK3mYGloVBiD6jrSYrpbY7dyU2Hop11eyl1HmEGpO1LDv7+xvWHvAzzWiHRZlv2D4ph
iXDh69B1pPn7WzvfXIW3cMdhfnGYY4Um3WMGmCXPQfr/6h6L6m7uXZXzS/3nRHYzB0Vv/uLA7RQCQs3w1WuC+btf+ry5vk7tNbuZvTgDzsLbaQSrVq8x/05XFMCaf8Piv9YFNlcHuGIBJB5X/zWyN5g1bvveDJLOIPPvR81XYKgZfHtNNMcsGYb5d7Q02+x1auLbTgohfrx2a2S1esF6NVutXmh8zat2FPDggnWs3lFwwGNx4UH0TwxnQMcIRvWIYUK/OAIc5ngMr89g+ZYcvIaN4d1jCAlshvfXMMyvXw8g3fiJ+a/poRfhjepx+HqLdpvnJ02t24gRzJ6SvevMcSmHGmcC5piUV8+puz0DZg9EXH/zAzMowgwuWT+bjwVHmovUjbiy/oq2VeVQmmv2nqz/wJzBVJZr3jqZeA+kXFXXRk+l2XNRE8wMA7Z+i2/5i+QXVxAz4jzs3UebPUzf/a0u4PQ9Dc76m7mA3c+vmptURnWDC14wbwftz+c1b4msfgvKC8zxMzG9zDaU5kDJXnPX65HX1O3fVPN+rHgJVr1VtyFkYJi5ZURonLkpZmlO3fmuDpB0ihmAOqeYAW//D3mf11zzZtWbZo/KfqsDG6HxFIb1IrIyyxzXVCOmlxlm9qaZ799vFgA2czzQ+gXm8SPVIal6/6rqWk5+FI7/3ZE//wgohPjx2q2R1eoF69VstXrh6Gr2+Qze+WkHf/lsE7kllQc9LzYsiPOHd8bjNVi4JoucYvPcQIed5G5R9OgQSnmVl/IqL+HBTgZ1imRw50iO6xyBK7B5BjO26O94X6b5wRs/wPxXfM1O0fsr2GEOuu06+shmBHmrzJ6X6B71P+QP9ZSGai7NgxUvmh/Mx13QcvtBucvMcSZVZTDovLoxM1XlZrAp3Gn2QHUbc+TbC7hLzSCy4UNzM86aYFAjbgCMTYWhl5i3yV47zwx/Nnv93il7gLl44IAzzTEvnkrwVJh/1oTBbd/W3boCs7ctti+cPdvsxWlC2sBORKQBdruNC0d2Y/qIrlR4vFR5Daq8Pnbkl7FhTzFpuwr5bO0ecksq+de3dYM+o1wBhAQ42F1YwdKt+Szdml/vuu/+vAuAIKedif3iOH1wR8b27kBooJMgpx2n4zC9D61NdHeY+tChz4nqan4dKUcAdBxybO0Cc5zKhDuP/TqNFeiCoRceeDwgBFJ+e5TXDIWBZ5tfHjfeLd+QtfprOiVPxdFlhNnLVCMkCq54H1473xy86wyGXpPMPaD6n3lkM4BKss3gGNHJvN20/4aZfqIQIiKWY7fb6vVYxIYFkdwtmouBh84exFcbslmwOgun3caZQzoxoW8cAQ4bmXll/JCRR35pJSGBToID7OQWu1mbVUjazkL2FFXw2bq9fLZub73XcwU66BrtomtMCF2iXXSNcdE1OoRuHVx0jXYRGqT/FVueMxB6Tya7uAOdeg6DhnoQgiPhyg/NgcKdhjU4w+aQwuLNW3WtiP7mi4jsJ8Bh55RBiZwyKPGAx3rEhtIjtuH/8RuGwS+7i/koLYtP0vawJbe09rEyt5eNe4vZuLeBwZpATGggfeLCmNg/jikDEugTF0ZWYTkZOaXkFFcS6LQTYIfcHDcDPD5cFrnlJg0ICDEHJLcTCiEiIk3AZrMxsFMEAztFcMcp/fH6DNweH5UeL3mlbnbkl7FjXzk788vYsa+MHfnl7NhXRkFZFfmlbpaV5rNsWz5PfrqRAIeNKm/Dw/Ue/+FLxvWJ5YSkWDpFhhAfEURcuLnabEBbu+0jltfoEJKXl8cf//hHli1bhsPh4Oyzz+auu+7C6TzwUm+99Rbz5s0jOzub+Ph4rrjiCi699NImabiISGvmsNsICXQQEuggyhVI77iwBs8rqqhiR34ZK7cX8OUve/k+Iw+3x0eAw0aPDqEkRgbj8RpUeLxs3VtEQaWX/63fy//W17/lY7NBjCuQuPCg2q/EiGB6x4XRNyGcKFcAP2/fx49b8tm5r4wz
h3Tk/OFdFFzErxodQm699VYSEhJYvHgxubm53HjjjcybN49rrrmm3nlffPEFf/3rX5k7dy5Dhw5l1apVXHfddcTGxnLKKac0WQEiIm1ZRHAAgzpFMqiTuSdOaaWHvBI3naKC6w1o9Xq9/LxyJYHxvfg2PY9VOwrILq4kp7iSnJJKvD6DvFI3eaVuNuxp+LbP/han5zL7y81cP6EX/RLCiXQFEBkSQFxYUNsbSCttVqNCSGZmJsuWLePbb78lJCSErl27kpqayl/+8pcDQsjevXu59tprGTZsGADJycmMHj2a5cuXK4SIiBxEaJDzoANV7TYbx3WOZGi3+jMhfD6D/DI3OcWVtcEku7iCrIJyNmeXsDm7hLxSN4M6RTCmZweiXAG8siSTXQXlPPDBunrXcthtJEYE0zkqhE5RwXSODiEuLIj8sir2FJazr6yK4d2iOe24RHrEhmIYBtvyyti4p4hKjzlt1Gm3M7x7FB0jD7FEugiNDCHp6elERUWRkFC3FXbv3r3JysqiqKiIiIi6ud+/vu2Sl5fH8uXLueeeexrVQK/X26jzG3PN5rh2a2S1esF6NVutXrBezYerNzrESXSIk77xDQ+c9Xh99Xo4rjq+O28t38HHaXvYV+amsNxDYXkVXp/BroLyg+7NA/D5+r088ekGesWGklNSSXGF54BzbDYY2T2as4Z25LTjEol2BdY+9svuIt7+aScVbh/BgXZcAQ6Gdo1iYt84Ap11bayq8mAYhn7HbUxj2t+oxco++OADnn76ab755pvaY9u3b2fq1KksWrSIxMQDR5MD5OTkcP311xMdHc3zzz/f4PiRX6tZ7ERERFqG1zAoqPCRW+Ylt8xLTqmXnDIfBRVeIoLsdAhxEOi0sWpPJWnZbnzVnx4BdugW6cQVYAaIsiofGfvqgonDBkMTghjRMYjlWRWs2utu8PXDAm2M7RKMDcjYV0VmoQenzUYHl51YlwOfAUWVPkrcPjqFOzmxezBjOwcTEuDf20clbl91+3Uba39NvliZy+WivLx+Oq75OTS04fS9atUqfv/735OSksKf//znIwog+xs8eHCzrJialpbWLNdujaxWL1ivZqvVC9arubXVu6/MzU+ZBXSKDCYpIeyAAa5ZBeV8lLaHhat3s253ET/vqeTnPebKs3YbnHZcIv0Twymv8lJQVsWXv2Szt7iSz7fU/4zxYLCr2Muu4vr/us4rd5OW7ebFVSX0SwgnJNBBsNNBhcfLvlI3+aVVlFd58PgMPF6D7h1cnJ/cmfOSO5HQ0AaGv1Lu9rKvzM2+sioCHDZ6xoYS4LBjGAbrdxfz2bq9/LR9H5uzS8gtceOw2zhvWCdSJ/amewcXVV4fG/YUU1HlZXi3aBz2upVdfT5zkbyggPq/x9b2Oz5aNXUciUYlgqSkJAoKCsjNzSU2NhaAjIwMEhMTCQ8PP+D8+fPn88gjj3DLLbfw298e3YpyDoej2X4ZzXnt1shq9YL1arZavWC9mltLvbHhIZxy3MHHfHTtEMYNE/tww8Q+bM4uYcHqLH7YnMuAjhFcO74X3Tq46p3v9Rksycjjs3V7CA1yMrhzJP0TQ1m//hciO/Uku9iN024jOjSQsCAnSzJyeXflLrbklLJ6Z+FBWlEnI6eUv/xvE//3+Sb6JoQTHOAgOMBOkLPuzzK3h537ytm1r5ziyvq3mAIcNvrEh1NSWcWO/ANvVXl9BvN/3sV7q7IY1CmCTXuLqagye0i6RIdw6ejuJHeL4n/r9vJRmrkVwKieMZw+uCOT+sUTFx6Es3pfn+b8HVdUeQly2g+9aWMLalQI6dGjByNGjOCxxx5j1qxZ7Nu3jzlz5jBt2rQDzv3ss8/405/+xD/+8Q/Gjx/fZA0WEZG2pU98GLdN7cttU/se9ByH3cYJSeb6JzW8Xi8F4U6G9e5wwIfyiO7R3DSpD+uyithdWGHu5eP2EBzgICY0kGiXGVYcdht2u43v03N5e8UOVmTuO6LZQ2AG
j2hXIOVuL8WVHn7ZXQRAcICdiX3jmdQ/jv6JEfSOD2PT3mJmf5nONxtzWFMdiiKCzY/YnfvKeeLTDQdc/8ct+fy4JR9YV/t6LqeNLt/9QMeoECKCnewurGBXQTl5JZXYbTacDnO13z7xYQzsFEGPDi5yiivZua+c3BI3Ua4A4sODSIwMZmSPGPonhmOz2Vi9o4Cnv9jENxtziA0LYkT3KEb17MAlo7o1z6aMR6jRU3Rnz57NrFmzmDx5Mna7nXPPPZfU1FTAnAHz0EMPcfbZZ/Pss8/i9Xq55ZZb6j3/rLPOYtasWU3TehERsSxb9Wyh4zpHHvbcGSO7MmNkV7bllrI9v4yKKi+VHl/1l5eKKh9BTjudo0PoGh1CfEQw4UFObDYbhmEO1v1ldzE24Pg+HQ7YqHB4t2jmXTWKtbsKycgpYVCnSHrFhuL2+liwOovXf8xke34ZE/rGceaQTvSJD+OL9Xv5KG03q3cWYBhQ5TUo9BoU7i5iXXXgaci+sip2FZSzaFPOQc+pkRgRTLcYF8u21e13lFtSWbu9QHFFFbdOOXg4bG6NDiGxsbHMnj27wcdWrlxZ+/3ChQuPvlUiIiLN4FBL7x+MzWajS7SLLtGuw57761AUbHcwI6UrM1IO3Ozv2hN7ce2JvfD5DErdHgpKK1m6ai1Rid3ZW+KmqNxDYmQQnaNcxIUHmTOFfAZFFVX8sruY9buL2JFfRkJEMF2iQ4gLD6KgrIqc4kq25paydGsee4oq2FNUgd0G5yZ35sYJvSksr2JF5j4y88o4a2inRr0XTU3LtouIiPiR3W4jPDgAV4CdnlEBDOsff9gxISO6H37X3IoqL0u35pO+t5hJ/ePrrdqb0uMIdt1tAQohIiIi7VBwgIMJfeOY0DfO3005KE1qFhEREb9QCBERERG/UAgRERERv1AIEREREb9QCBERERG/UAgRERERv1AIEREREb9QCBERERG/UAgRERERv1AIEREREb9QCBERERG/UAgRERERv1AIEREREb9otbvoGoYBgNfrbfJr11yzOa7dGlmtXrBezVarF6xXs9XqBevV3F7qrWl/zef4odiMIznLD9xuN2lpaf5uhoiIiByFwYMHExgYeMhzWm0I8fl8eDwe7HY7NpvN380RERGRI2AYBj6fD6fTid1+6FEfrTaEiIiISPumgakiIiLiFwohIiIi4hcKISIiIuIXCiEiIiLiFwohIiIi4hcKISIiIuIXCiEiIiLiF5YKIXl5eaSmppKSksLo0aN59NFH8Xg8/m5Wk9qwYQNXXXUVo0aNYty4cdx5553k5+cDsHr1aqZPn05ycjInnXQS77zzjp9b23S8Xi+XX345d999d+2x9lpvQUEBd955J6NHj2bkyJGkpqaSnZ0NtM+a161bx6WXXkpKSgonnHACjzzyCG63G2h/9ebn5zN16lSWLl1ae+xwNb733ntMnTqVYcOGcf7557Ny5cqWbvYxaajmzz77jHPOOYfhw4dz0kkn8eyzz+Lz+Wofb8s1N1RvjezsbI4//njefffdesfbcr2HZVjIZZddZtx+++1GWVmZsX37duOMM84w5s6d6+9mNZny8nJj3Lhxxt/+9jejsrLSyM/PN6699lrj+uuvNwoKCoxRo0YZr7/+ulFVVWX88MMPRnJysrF69Wp/N7tJPPPMM0b//v2Nu+66yzAMo13Xe9lllxk33XSTUVhYaBQXFxu/+93vjOuuu65d1uz1eo1x48YZr7zyiuH1eo3du3cbp5xyivHss8+2u3pXrFhhTJkyxejbt6/x448/GoZx+L/HP/74o5GcnGysWLHCcLvdxssvv2yMHj3aKCsr82cpR6yhmtPS0owhQ4YYX331leH1eo3NmzcbkyZNMl588UXDMNp2zQ3VW8Pr9RqXX3650b9/f+O///1v7fG2XO+RsExPSGZmJsuWLeOOO+4gJCSErl27kpqayhtvvOHvpjWZrKws+vfvz0033URgYCDR0dFc
eOGFLF++nP/9739ERUVx6aWX4nQ6GTt2LGeddVa7qH/JkiX873//4+STT6491l7rXbt2LatXr+bxxx8nIiKCsLAwHn74Yf7whz+0y5oLCwvJycnB5/PVboZlt9sJCQlpV/W+9957/OEPf2DmzJn1jh+uxnfeeYczzjiDESNGEBAQwJVXXkl0dDQff/yxP8polIPVvGvXLi666CImTZqE3W6nd+/eTJ06leXLlwNtt+aD1VvjueeeIzExkY4dO9Y73lbrPVKWCSHp6elERUWRkJBQe6x3795kZWVRVFTkx5Y1nV69evHCCy/gcDhqj3322WcMGjSI9PR0+vbtW+/8Pn36sGHDhpZuZpPKy8vjvvvu4//+7/8ICQmpPd5e612zZg19+vTh7bffZurUqZxwwgk88cQTxMXFtcuao6OjufLKK3niiScYPHgwEyZMoEePHlx55ZXtqt4TTjiBzz//nNNPP73e8cPVuHnz5jb7Hhys5lNOOYV77rmn9ueKigq++eYbBg0aBLTdmg9WL8CPP/7IRx99xIMPPnjAY2213iNlmRBSWlpa70MKqP25rKzMH01qVoZh8PTTT/P1119z3333NVh/cHBwm67d5/Nxxx13cNVVV9G/f/96j7XHesHsGdi4cSPbtm3jvffe4/3332fv3r3cdddd7bJmn89HcHAwf/zjH1m1ahUffvghGRkZzJ49u13VGxcXh9PpPOD44Wpsy+/BwWreX0lJCTfddBPBwcFceeWVQNut+WD15uXlce+99/LUU08RGhp6wONttd4jZZkQ4nK5KC8vr3es5ueGfvFtWUlJCbfccgsLFy7k9ddfp1+/foSEhFBRUVHvvIqKijZd+/PPP09gYCCXX375AY+1x3qB2m2x77vvPsLCwoiNjeXWW29l0aJFGIbR7mr+/PPP+eyzz7jkkksIDAwkKSmJm266ibfeeqvd/o73d7ga2/N7sGXLFi666CI8Hg+vvvoqYWFhQPuq2TAM7rzzTi6//HKOO+64Bs9pT/U2xDIhJCkpiYKCAnJzc2uPZWRkkJiYSHh4uB9b1rS2b9/OBRdcQElJCfPnz6dfv34A9O3bl/T09Hrnbt68maSkJH80s0l88MEHLFu2jJSUFFJSUvjwww/58MMPSUlJaZf1gtkN6/P5qKqqqj1WM2tgwIAB7a7m3bt3186EqeF0OgkICGi3v+P9Ha7GpKSkdvkeLFq0iOnTpzN+/HhefPFFIiMjax9rTzXv3r2bZcuW8dxzz9X+fywrK4uHHnqI66+/Hmhf9TbIv+NiW9bFF19szJw50yguLq6dHTN79mx/N6vJFBQUGBMnTjTuvvtuw+v11nssPz/fSElJMV5++WXD7XYbS5YsMZKTk40lS5b4qbVN76677qqdHdNe63W73cbUqVONm2++2SgpKTHy8vKMK664wrjpppvaZc3p6enGcccdZ/zjH/8wPB6PsX37duPMM880Hn/88XZZr2EY9WZOHK7GmtkyS5YsqZ05MXLkSGPfvn1+rKDx9q955cqVxqBBg4x33nmnwXPbQ80NzY6pMWnSpHqzY9pDvYdiqRCSk5Nj3HzzzcaoUaOMMWPGGI8//rjh8Xj83awm89JLLxl9+/Y1hg4dagwbNqzel2EYxpo1a4wLL7zQSE5ONiZPnlzvL3p7sH8IMYz2W++ePXuMW2+91Rg3bpyRkpJi3HnnnUZhYaFhGO2z5u+//96YPn26MWLECGPixInGX//6V6OystIwjPZZ768/oA5X4/vvv2+ccsopxrBhw4xp06YZq1ataukmH7P9a77++uuNfv36HfD/sKuvvrr2/LZec2NCiGG0/XoPxWYY1fPeRERERFqQZcaEiIiISOuiECIiIiJ+oRAiIiIifqEQIiIiIn6hECIiIiJ+oRAiIiIifqEQIiIiIn6hECIiIiJ+oRAiIiIifqEQIiIiIn6hECIiIiJ+oRAiIiIifvH/j13cIvlcl5MAAAAASUVORK5CYII=
"
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO logger 2023-08-24 11:48:52,001 | train_utils.py:139 | Best Loss: 0.00036340996361615364, Best epoch: 150\n"
     ]
    }
   ],
   "source": [
    "trained_model = fit(model, X_train, y_train, X_val, y_val)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Prediction on Stort1\n",
      "Prediction on Stort2\n",
      "Prediction on Stort3\n",
      "Prediction on Stort4\n",
      "Prediction on Stort5\n",
      "Prediction on Stort6\n",
      "Prediction on Stort7\n",
      "Prediction on Stort8\n",
      "Prediction on Stort9\n",
      "Prediction on Stort10\n",
      "Prediction on Stort11\n",
      "Prediction on Stort12\n",
      "Prediction on Stort13\n",
      "Prediction on Stort14\n",
      "Prediction on Stort15\n",
      "Prediction on Stort16\n",
      "Prediction on Stort17\n",
      "Prediction on Stort18\n",
      "Prediction on Stort19\n",
      "Prediction on Stort20\n",
      "Prediction on Stort21\n",
      "Prediction on Stort22\n",
      "Prediction on Stort23\n",
      "Prediction on Stort24\n",
      "Prediction on Stort25\n",
      "Prediction on Stort26\n",
      "Prediction on Stort27\n",
      "Prediction on Stort28\n",
      "Prediction on Stort29\n",
      "Prediction on Stort30\n",
      "Prediction on Stort31\n",
      "Prediction on Stort32\n",
      "Prediction on Stort33\n",
      "Prediction on Stort34\n",
      "Prediction on Stort35\n",
      "Prediction on Stort36\n",
      "Prediction on Stort37\n",
      "Prediction on Stort38\n",
      "Prediction on Stort39\n",
      "Prediction on Stort40\n",
      "Prediction on Stort41\n",
      "Prediction on Stort42\n",
      "Prediction on Stort43\n",
      "Prediction on Stort44\n",
      "Prediction on Stort45\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO logger 2023-08-24 11:49:23,285 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 40289964032.0, rmse: 200723.60108367924, mae: 125681.8359375, r^2: -0.7788777382581666, nrmse: 0.13151296397540416\n",
      "INFO logger 2023-08-24 11:49:23,287 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 24723234816.0, rmse: 157236.2388764117, mae: 110571.2890625, r^2: -0.22262592582840823, nrmse: 0.09770657708343283\n",
      "INFO logger 2023-08-24 11:49:23,290 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 33674432512.0, rmse: 183505.94680282162, mae: 122538.671875, r^2: 0.4160762113496712, nrmse: 0.09607723563746345\n",
      "INFO logger 2023-08-24 11:49:23,291 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 50488958976.0, rmse: 224697.48324358242, mae: 175351.125, r^2: -1.6836632675625949, nrmse: 0.1177805833721818\n",
      "INFO logger 2023-08-24 11:49:23,293 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 5644521472.0, rmse: 75130.03042725325, mae: 53997.53125, r^2: -1.556789542722938, nrmse: 0.19261611604008294\n",
      "INFO logger 2023-08-24 11:49:23,295 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 11328261120.0, rmse: 106434.3042444493, mae: 76032.5625, r^2: -6.706668685842257, nrmse: 0.2506787877220804\n",
      "INFO logger 2023-08-24 11:49:23,296 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 43873271808.0, rmse: 209459.47533592267, mae: 147710.65625, r^2: 0.3273439552815819, nrmse: 0.1027321999309784\n",
      "INFO logger 2023-08-24 11:49:23,297 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 44793638912.0, rmse: 211645.07769376543, mae: 162494.546875, r^2: -1.2628451259265359, nrmse: 0.09760734356982105\n",
      "INFO logger 2023-08-24 11:49:23,299 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 4825954304.0, rmse: 69469.08883813002, mae: 51971.19140625, r^2: -2.120915317445218, nrmse: 0.223696413027919\n",
      "INFO logger 2023-08-24 11:49:23,301 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 5990084096.0, rmse: 77395.63357192704, mae: 65870.71875, r^2: -7.417442679756801, nrmse: 0.23099489487233524\n",
      "INFO logger 2023-08-24 11:49:23,303 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 39809073152.0, rmse: 199522.11193749928, mae: 134703.84375, r^2: 0.13816702665570202, nrmse: 0.12840540458464242\n",
      "INFO logger 2023-08-24 11:49:23,304 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 20462712832.0, rmse: 143047.93892957704, mae: 123603.2890625, r^2: -0.2452605362124025, nrmse: 0.0936706960573446\n",
      "INFO logger 2023-08-24 11:49:23,306 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 16565541888.0, rmse: 128707.19439098967, mae: 100479.2109375, r^2: -0.33918303181092146, nrmse: 0.2320844850136637\n",
      "INFO logger 2023-08-24 11:49:23,307 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 32591966208.0, rmse: 180532.45195255062, mae: 132711.34375, r^2: -7.230433508315098, nrmse: 0.3255131326506836\n",
      "INFO logger 2023-08-24 11:49:23,309 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 17856245760.0, rmse: 133627.26428390277, mae: 89765.90625, r^2: -0.5394774201506214, nrmse: 0.14914059908320157\n",
      "INFO logger 2023-08-24 11:49:23,311 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 14702971904.0, rmse: 121255.81183596933, mae: 103075.3984375, r^2: -1.5703343386097073, nrmse: 0.1318215119933487\n",
      "INFO logger 2023-08-24 11:49:23,312 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 8602363904.0, rmse: 92748.9293954383, mae: 69877.1796875, r^2: -0.8953600660171619, nrmse: 0.17427668414429254\n",
      "INFO logger 2023-08-24 11:49:23,314 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 5290461696.0, rmse: 72735.56005146314, mae: 54137.5703125, r^2: -0.9521190371577748, nrmse: 0.12807475777327215\n",
      "INFO logger 2023-08-24 11:49:23,316 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 45585285120.0, rmse: 213507.10789104892, mae: 129354.6171875, r^2: 0.5074534417978556, nrmse: 0.11262640132965886\n",
      "INFO logger 2023-08-24 11:49:23,317 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 51938045952.0, rmse: 227899.20129741568, mae: 188876.265625, r^2: -0.888961262957509, nrmse: 0.120808239266044\n",
      "INFO logger 2023-08-24 11:49:23,319 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 21848864768.0, rmse: 147813.61496154542, mae: 103468.2890625, r^2: 0.15578648865187184, nrmse: 0.11078699486592705\n",
      "INFO logger 2023-08-24 11:49:23,320 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 22196764672.0, rmse: 148985.78681203118, mae: 136279.90625, r^2: -0.5537931385948192, nrmse: 0.10857073707582089\n",
      "INFO logger 2023-08-24 11:49:23,322 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 19319969792.0, rmse: 138996.29416642734, mae: 98300.0078125, r^2: 0.06529349901412806, nrmse: 0.1400720563134179\n",
      "INFO logger 2023-08-24 11:49:23,324 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 19924336640.0, rmse: 141153.59237369767, mae: 111699.2890625, r^2: -1.0075673450070286, nrmse: 0.1341015760321956\n",
      "INFO logger 2023-08-24 11:49:23,325 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 44850757632.0, rmse: 211779.9745773901, mae: 148694.15625, r^2: 0.3364118127624832, nrmse: 0.10746353554079305\n",
      "INFO logger 2023-08-24 11:49:23,327 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 110695710720.0, rmse: 332709.64927395777, mae: 224094.609375, r^2: -5.489457869039293, nrmse: 0.16732017840629398\n",
      "INFO logger 2023-08-24 11:49:23,333 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 51816476672.0, rmse: 227632.32782713443, mae: 164099.5625, r^2: 0.34651074480599586, nrmse: 0.10882632067287971\n",
      "INFO logger 2023-08-24 11:49:23,334 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 112817356800.0, rmse: 335882.9510409839, mae: 215095.765625, r^2: -3.692888616918407, nrmse: 0.1751545592120429\n",
      "INFO logger 2023-08-24 11:49:23,336 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 20907933696.0, rmse: 144595.75960587504, mae: 86938.5625, r^2: -0.3223702110875766, nrmse: 0.22796863345902396\n",
      "INFO logger 2023-08-24 11:49:23,337 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 13871891456.0, rmse: 117778.9941203439, mae: 82815.5625, r^2: -4.702923826925615, nrmse: 0.2122572377134555\n",
      "INFO logger 2023-08-24 11:49:23,339 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 12718745600.0, rmse: 112777.41617894958, mae: 81376.3203125, r^2: -0.7064080151762715, nrmse: 0.21848448330425072\n",
      "INFO logger 2023-08-24 11:49:23,340 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 28185075712.0, rmse: 167884.11393577416, mae: 110971.0625, r^2: -30.77217736232303, nrmse: 0.3534657731160299\n",
      "INFO logger 2023-08-24 11:49:23,341 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 18380271616.0, rmse: 135573.86037138576, mae: 100549.7109375, r^2: -0.625901704442964, nrmse: 0.15533841106362034\n",
      "INFO logger 2023-08-24 11:49:23,343 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 17367881728.0, rmse: 131787.25935385408, mae: 85497.4765625, r^2: -2.1546697888975035, nrmse: 0.1471615892937456\n",
      "INFO logger 2023-08-24 11:49:23,344 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 29105917952.0, rmse: 170604.56603502732, mae: 118499.3515625, r^2: 0.16660937312148005, nrmse: 0.15741291149004133\n",
      "INFO logger 2023-08-24 11:49:23,346 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 17870776320.0, rmse: 133681.62297039933, mae: 111978.0234375, r^2: -1.507179778266654, nrmse: 0.12987433255996655\n",
      "INFO logger 2023-08-24 11:49:23,347 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 43707289600.0, rmse: 209062.88431952716, mae: 150276.53125, r^2: -0.2051567499624234, nrmse: 0.14353744746141872\n",
      "INFO logger 2023-08-24 11:49:23,349 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 32791412736.0, rmse: 181083.993594133, mae: 150784.953125, r^2: -2.4248981208172227, nrmse: 0.13375248384502064\n",
      "INFO logger 2023-08-24 11:49:23,350 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 38676525056.0, rmse: 196663.481755002, mae: 142111.0625, r^2: 0.49334173583753593, nrmse: 0.09420481064496568\n",
      "INFO logger 2023-08-24 11:49:23,352 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 146348228608.0, rmse: 382554.8700617991, mae: 309291.15625, r^2: -2.847758431364703, nrmse: 0.18281353403227338\n",
      "INFO logger 2023-08-24 11:49:23,354 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 15646464000.0, rmse: 125085.8265352234, mae: 84007.3515625, r^2: 0.11231859569287239, nrmse: 0.16312540729436212\n",
      "INFO logger 2023-08-24 11:49:23,355 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 14961128448.0, rmse: 122315.69174885126, mae: 96956.421875, r^2: -3.7209753668832706, nrmse: 0.1701660869822184\n",
      "INFO logger 2023-08-24 11:49:23,356 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 20149637120.0, rmse: 141949.41746974518, mae: 98255.0, r^2: 0.2454184589440025, nrmse: 0.13762556624232689\n",
      "INFO logger 2023-08-24 11:49:23,358 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 11236444160.0, rmse: 106002.09507363522, mae: 82256.9921875, r^2: -0.7794244026397779, nrmse: 0.1095185509314907\n",
      "INFO logger 2023-08-24 11:49:23,359 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 31251806208.0, rmse: 176781.80395051974, mae: 133780.421875, r^2: 0.4951610768305337, nrmse: 0.12809121203335005\n",
      "INFO logger 2023-08-24 11:49:23,361 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 40765296640.0, rmse: 201904.17687606168, mae: 162073.6875, r^2: -1.8836260020884459, nrmse: 0.15804271928130145\n",
      "INFO logger 2023-08-24 11:49:23,362 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 27361728512.0, rmse: 165413.80991924464, mae: 128076.8125, r^2: 0.04776088681037671, nrmse: 0.12199904630321359\n",
      "INFO logger 2023-08-24 11:49:23,363 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 27262527488.0, rmse: 165113.68049922455, mae: 133943.015625, r^2: -1.731194226890663, nrmse: 0.13042772998659063\n",
      "INFO logger 2023-08-24 11:49:23,366 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 11195121664.0, rmse: 105807.00196111787, mae: 75167.59375, r^2: 0.15535838151374382, nrmse: 0.14939065049885975\n",
      "INFO logger 2023-08-24 11:49:23,367 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 10150571008.0, rmse: 100750.04222331621, mae: 85711.2890625, r^2: -3.407924334566512, nrmse: 0.1522759506944526\n",
      "INFO logger 2023-08-24 11:49:23,369 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 19581865984.0, rmse: 139935.22067013723, mae: 113505.7109375, r^2: -0.64919048557855, nrmse: 0.14041676240470888\n",
      "INFO logger 2023-08-24 11:49:23,370 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 44227477504.0, rmse: 210303.29884240997, mae: 136508.0625, r^2: -5.966173256031466, nrmse: 0.22286851424106405\n",
      "INFO logger 2023-08-24 11:49:23,372 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 46466494464.0, rmse: 215560.88342739738, mae: 146200.25, r^2: 0.18900549226502938, nrmse: 0.11884456629672241\n",
      "INFO logger 2023-08-24 11:49:23,373 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 23850971136.0, rmse: 154437.59625168995, mae: 123822.5546875, r^2: -0.7437896751402346, nrmse: 0.09600715915447854\n",
      "INFO logger 2023-08-24 11:49:23,375 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 48943337472.0, rmse: 221231.41158524482, mae: 166232.0, r^2: -0.4639312453841582, nrmse: 0.16762981014205391\n",
      "INFO logger 2023-08-24 11:49:23,376 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 47036747776.0, rmse: 216879.56975243197, mae: 157075.96875, r^2: -0.667813584512964, nrmse: 0.16236542043330493\n",
      "INFO logger 2023-08-24 11:49:23,378 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 12670283776.0, rmse: 112562.35505709713, mae: 70889.046875, r^2: -0.1481806300297115, nrmse: 0.20766894148589152\n",
      "INFO logger 2023-08-24 11:49:23,379 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 10083829760.0, rmse: 100418.27403416173, mae: 82884.4609375, r^2: -3.39075696939799, nrmse: 0.1977769144792157\n",
      "INFO logger 2023-08-24 11:49:23,381 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 5368140288.0, rmse: 73267.593709634, mae: 49452.828125, r^2: -8.04866380502483, nrmse: 0.16750661818821425\n",
      "INFO logger 2023-08-24 11:49:23,383 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 6411479552.0, rmse: 80071.7150559422, mae: 59988.81640625, r^2: -36.750261476551756, nrmse: 0.18150914367792895\n",
      "INFO logger 2023-08-24 11:49:23,384 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 13818121216.0, rmse: 117550.5049585071, mae: 91425.6015625, r^2: 0.20473836050278227, nrmse: 0.08492178313559033\n",
      "INFO logger 2023-08-24 11:49:23,385 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 36252475392.0, rmse: 190400.82823349274, mae: 137321.3125, r^2: -5.8403638501175665, nrmse: 0.13511749022020825\n",
      "INFO logger 2023-08-24 11:49:23,387 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 22991355904.0, rmse: 151629.00746229265, mae: 104944.53125, r^2: -0.24015839629627922, nrmse: 0.1312981176155789\n",
      "INFO logger 2023-08-24 11:49:23,388 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 45014167552.0, rmse: 212165.4249683487, mae: 143552.0625, r^2: -14.52234729620633, nrmse: 0.18543021278315622\n",
      "INFO logger 2023-08-24 11:49:23,390 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 3727497472.0, rmse: 61053.234738218416, mae: 48816.5390625, r^2: -4.777697278039981, nrmse: 0.23864078111908477\n",
      "INFO logger 2023-08-24 11:49:23,391 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 2650944000.0, rmse: 51487.318827066534, mae: 43470.8359375, r^2: -5.734662153573847, nrmse: 0.19317952890880724\n",
      "INFO logger 2023-08-24 11:49:23,393 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 20632944640.0, rmse: 143641.72318654493, mae: 95931.9921875, r^2: -0.8638293183866872, nrmse: 0.15031975339080828\n",
      "INFO logger 2023-08-24 11:49:23,394 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 10337351680.0, rmse: 101672.76764207808, mae: 82149.6953125, r^2: -1.9371099679655264, nrmse: 0.10417631326951225\n",
      "INFO logger 2023-08-24 11:49:23,396 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 34894512128.0, rmse: 186800.72839258416, mae: 104624.96875, r^2: 0.3339193924372731, nrmse: 0.1978671145768036\n",
      "INFO logger 2023-08-24 11:49:23,398 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 8350986752.0, rmse: 91383.73351970251, mae: 73444.59375, r^2: -0.7362828898914648, nrmse: 0.11950051729422745\n",
      "INFO logger 2023-08-24 11:49:23,399 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 4395631104.0, rmse: 66299.55583561628, mae: 49336.2734375, r^2: -0.9956538250556144, nrmse: 0.1651855166414639\n",
      "INFO logger 2023-08-24 11:49:23,401 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 5045637632.0, rmse: 71032.65187222, mae: 61860.828125, r^2: -41.998251224716064, nrmse: 0.2210093442336893\n",
      "INFO logger 2023-08-24 11:49:23,402 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 4946613760.0, rmse: 70332.16732050847, mae: 54107.05859375, r^2: -10.900643726429452, nrmse: 0.136603438837544\n",
      "INFO logger 2023-08-24 11:49:23,404 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 7633102336.0, rmse: 87367.62750584452, mae: 66064.8046875, r^2: -24.567241089993004, nrmse: 0.1648539309946133\n",
      "INFO logger 2023-08-24 11:49:23,405 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 5569184256.0, rmse: 74626.96735095163, mae: 53201.35546875, r^2: -5.684356027512933, nrmse: 0.2045986669249316\n",
      "INFO logger 2023-08-24 11:49:23,406 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 2770975232.0, rmse: 52640.053495413544, mae: 43909.78125, r^2: -2.216219740871178, nrmse: 0.12111541801653525\n",
      "INFO logger 2023-08-24 11:49:23,408 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 31719233536.0, rmse: 178098.94310747608, mae: 117524.5859375, r^2: 0.2624478997193971, nrmse: 0.12708120748376187\n",
      "INFO logger 2023-08-24 11:49:23,409 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 28895287296.0, rmse: 169986.13854076454, mae: 135390.453125, r^2: -2.4662658766410908, nrmse: 0.11533112300678137\n",
      "INFO logger 2023-08-24 11:49:23,411 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 15782274048.0, rmse: 125627.52106127064, mae: 87399.53125, r^2: -0.22044195015284918, nrmse: 0.13124894624100963\n",
      "INFO logger 2023-08-24 11:49:23,413 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 17478430720.0, rmse: 132206.01620198681, mae: 95796.078125, r^2: -1.453142031129567, nrmse: 0.1448173837602273\n",
      "INFO logger 2023-08-24 11:49:23,415 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 31227408384.0, rmse: 176712.78500436802, mae: 123545.5703125, r^2: 0.002178063126473284, nrmse: 0.14303559139409588\n",
      "INFO logger 2023-08-24 11:49:23,416 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 40458960896.0, rmse: 201144.12965831242, mae: 162642.53125, r^2: -4.033957131822105, nrmse: 0.16077321236156478\n",
      "INFO logger 2023-08-24 11:49:23,417 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 6249181696.0, rmse: 79051.76592587923, mae: 60397.5546875, r^2: -1.8335748168163253, nrmse: 0.14476199496687106\n",
      "INFO logger 2023-08-24 11:49:23,419 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 10068831232.0, rmse: 100343.56597211403, mae: 71781.0859375, r^2: -4.027541407436449, nrmse: 0.16721889681714044\n",
      "INFO logger 2023-08-24 11:49:23,420 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 6559638528.0, rmse: 80991.59541582078, mae: 62960.703125, r^2: -2.424665277010021, nrmse: 0.12738192815829205\n",
      "INFO logger 2023-08-24 11:49:23,422 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 21656768512.0, rmse: 147162.38823829952, mae: 107641.2265625, r^2: -30.196519820708787, nrmse: 0.23025502154672625\n",
      "INFO logger 2023-08-24 11:49:23,423 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 5886613504.0, rmse: 76724.2693285508, mae: 58955.82421875, r^2: -21.650941491272857, nrmse: 0.2639012612190142\n",
      "INFO logger 2023-08-24 11:49:23,424 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 2862393344.0, rmse: 53501.33964677894, mae: 41755.52734375, r^2: -23.429247473346006, nrmse: 0.16744425905491098\n",
      "INFO logger 2023-08-24 11:49:23,426 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 22282995712.0, rmse: 149274.89980569406, mae: 91769.34375, r^2: -0.2933248739887595, nrmse: 0.19042438314673754\n",
      "INFO logger 2023-08-24 11:49:23,427 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 15244080128.0, rmse: 123466.91916460863, mae: 86797.0859375, r^2: -3.3314822286808434, nrmse: 0.1605167152043263\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Final Prediction on Stort1 (Inference Stage)\n",
      "[Train]: mse: 40289964032.0, rmse: 200723.60108367924, mae 125681.8359375, r2: -0.7788777382581666, nrmse: 0.13151296397540416\n",
      "[Val]: mse: 24723234816.0, rmse: 157236.2388764117, mae 110571.2890625, r2: -0.22262592582840823, nrmse: 0.09770657708343283\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort2 (Inference Stage)\n",
      "[Train]: mse: 33674432512.0, rmse: 183505.94680282162, mae 122538.671875, r2: 0.4160762113496712, nrmse: 0.09607723563746345\n",
      "[Val]: mse: 50488958976.0, rmse: 224697.48324358242, mae 175351.125, r2: -1.6836632675625949, nrmse: 0.1177805833721818\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort3 (Inference Stage)\n",
      "[Train]: mse: 5644521472.0, rmse: 75130.03042725325, mae 53997.53125, r2: -1.556789542722938, nrmse: 0.19261611604008294\n",
      "[Val]: mse: 11328261120.0, rmse: 106434.3042444493, mae 76032.5625, r2: -6.706668685842257, nrmse: 0.2506787877220804\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort4 (Inference Stage)\n",
      "[Train]: mse: 43873271808.0, rmse: 209459.47533592267, mae 147710.65625, r2: 0.3273439552815819, nrmse: 0.1027321999309784\n",
      "[Val]: mse: 44793638912.0, rmse: 211645.07769376543, mae 162494.546875, r2: -1.2628451259265359, nrmse: 0.09760734356982105\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort5 (Inference Stage)\n",
      "[Train]: mse: 4825954304.0, rmse: 69469.08883813002, mae 51971.19140625, r2: -2.120915317445218, nrmse: 0.223696413027919\n",
      "[Val]: mse: 5990084096.0, rmse: 77395.63357192704, mae 65870.71875, r2: -7.417442679756801, nrmse: 0.23099489487233524\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort6 (Inference Stage)\n",
      "[Train]: mse: 39809073152.0, rmse: 199522.11193749928, mae 134703.84375, r2: 0.13816702665570202, nrmse: 0.12840540458464242\n",
      "[Val]: mse: 20462712832.0, rmse: 143047.93892957704, mae 123603.2890625, r2: -0.2452605362124025, nrmse: 0.0936706960573446\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort7 (Inference Stage)\n",
      "[Train]: mse: 16565541888.0, rmse: 128707.19439098967, mae 100479.2109375, r2: -0.33918303181092146, nrmse: 0.2320844850136637\n",
      "[Val]: mse: 32591966208.0, rmse: 180532.45195255062, mae 132711.34375, r2: -7.230433508315098, nrmse: 0.3255131326506836\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort8 (Inference Stage)\n",
      "[Train]: mse: 17856245760.0, rmse: 133627.26428390277, mae 89765.90625, r2: -0.5394774201506214, nrmse: 0.14914059908320157\n",
      "[Val]: mse: 14702971904.0, rmse: 121255.81183596933, mae 103075.3984375, r2: -1.5703343386097073, nrmse: 0.1318215119933487\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort9 (Inference Stage)\n",
      "[Train]: mse: 8602363904.0, rmse: 92748.9293954383, mae 69877.1796875, r2: -0.8953600660171619, nrmse: 0.17427668414429254\n",
      "[Val]: mse: 5290461696.0, rmse: 72735.56005146314, mae 54137.5703125, r2: -0.9521190371577748, nrmse: 0.12807475777327215\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort10 (Inference Stage)\n",
      "[Train]: mse: 45585285120.0, rmse: 213507.10789104892, mae 129354.6171875, r2: 0.5074534417978556, nrmse: 0.11262640132965886\n",
      "[Val]: mse: 51938045952.0, rmse: 227899.20129741568, mae 188876.265625, r2: -0.888961262957509, nrmse: 0.120808239266044\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort11 (Inference Stage)\n",
      "[Train]: mse: 21848864768.0, rmse: 147813.61496154542, mae 103468.2890625, r2: 0.15578648865187184, nrmse: 0.11078699486592705\n",
      "[Val]: mse: 22196764672.0, rmse: 148985.78681203118, mae 136279.90625, r2: -0.5537931385948192, nrmse: 0.10857073707582089\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort12 (Inference Stage)\n",
      "[Train]: mse: 19319969792.0, rmse: 138996.29416642734, mae 98300.0078125, r2: 0.06529349901412806, nrmse: 0.1400720563134179\n",
      "[Val]: mse: 19924336640.0, rmse: 141153.59237369767, mae 111699.2890625, r2: -1.0075673450070286, nrmse: 0.1341015760321956\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort13 (Inference Stage)\n",
      "[Train]: mse: 44850757632.0, rmse: 211779.9745773901, mae 148694.15625, r2: 0.3364118127624832, nrmse: 0.10746353554079305\n",
      "[Val]: mse: 110695710720.0, rmse: 332709.64927395777, mae 224094.609375, r2: -5.489457869039293, nrmse: 0.16732017840629398\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort14 (Inference Stage)\n",
      "[Train]: mse: 51816476672.0, rmse: 227632.32782713443, mae 164099.5625, r2: 0.34651074480599586, nrmse: 0.10882632067287971\n",
      "[Val]: mse: 112817356800.0, rmse: 335882.9510409839, mae 215095.765625, r2: -3.692888616918407, nrmse: 0.1751545592120429\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort15 (Inference Stage)\n",
      "[Train]: mse: 20907933696.0, rmse: 144595.75960587504, mae 86938.5625, r2: -0.3223702110875766, nrmse: 0.22796863345902396\n",
      "[Val]: mse: 13871891456.0, rmse: 117778.9941203439, mae 82815.5625, r2: -4.702923826925615, nrmse: 0.2122572377134555\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort16 (Inference Stage)\n",
      "[Train]: mse: 12718745600.0, rmse: 112777.41617894958, mae 81376.3203125, r2: -0.7064080151762715, nrmse: 0.21848448330425072\n",
      "[Val]: mse: 28185075712.0, rmse: 167884.11393577416, mae 110971.0625, r2: -30.77217736232303, nrmse: 0.3534657731160299\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort17 (Inference Stage)\n",
      "[Train]: mse: 18380271616.0, rmse: 135573.86037138576, mae 100549.7109375, r2: -0.625901704442964, nrmse: 0.15533841106362034\n",
      "[Val]: mse: 17367881728.0, rmse: 131787.25935385408, mae 85497.4765625, r2: -2.1546697888975035, nrmse: 0.1471615892937456\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort18 (Inference Stage)\n",
      "[Train]: mse: 29105917952.0, rmse: 170604.56603502732, mae 118499.3515625, r2: 0.16660937312148005, nrmse: 0.15741291149004133\n",
      "[Val]: mse: 17870776320.0, rmse: 133681.62297039933, mae 111978.0234375, r2: -1.507179778266654, nrmse: 0.12987433255996655\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort19 (Inference Stage)\n",
      "[Train]: mse: 43707289600.0, rmse: 209062.88431952716, mae 150276.53125, r2: -0.2051567499624234, nrmse: 0.14353744746141872\n",
      "[Val]: mse: 32791412736.0, rmse: 181083.993594133, mae 150784.953125, r2: -2.4248981208172227, nrmse: 0.13375248384502064\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort20 (Inference Stage)\n",
      "[Train]: mse: 38676525056.0, rmse: 196663.481755002, mae 142111.0625, r2: 0.49334173583753593, nrmse: 0.09420481064496568\n",
      "[Val]: mse: 146348228608.0, rmse: 382554.8700617991, mae 309291.15625, r2: -2.847758431364703, nrmse: 0.18281353403227338\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort21 (Inference Stage)\n",
      "[Train]: mse: 15646464000.0, rmse: 125085.8265352234, mae 84007.3515625, r2: 0.11231859569287239, nrmse: 0.16312540729436212\n",
      "[Val]: mse: 14961128448.0, rmse: 122315.69174885126, mae 96956.421875, r2: -3.7209753668832706, nrmse: 0.1701660869822184\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort22 (Inference Stage)\n",
      "[Train]: mse: 20149637120.0, rmse: 141949.41746974518, mae 98255.0, r2: 0.2454184589440025, nrmse: 0.13762556624232689\n",
      "[Val]: mse: 11236444160.0, rmse: 106002.09507363522, mae 82256.9921875, r2: -0.7794244026397779, nrmse: 0.1095185509314907\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort23 (Inference Stage)\n",
      "[Train]: mse: 31251806208.0, rmse: 176781.80395051974, mae 133780.421875, r2: 0.4951610768305337, nrmse: 0.12809121203335005\n",
      "[Val]: mse: 40765296640.0, rmse: 201904.17687606168, mae 162073.6875, r2: -1.8836260020884459, nrmse: 0.15804271928130145\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort24 (Inference Stage)\n",
      "[Train]: mse: 27361728512.0, rmse: 165413.80991924464, mae 128076.8125, r2: 0.04776088681037671, nrmse: 0.12199904630321359\n",
      "[Val]: mse: 27262527488.0, rmse: 165113.68049922455, mae 133943.015625, r2: -1.731194226890663, nrmse: 0.13042772998659063\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort25 (Inference Stage)\n",
      "[Train]: mse: 11195121664.0, rmse: 105807.00196111787, mae 75167.59375, r2: 0.15535838151374382, nrmse: 0.14939065049885975\n",
      "[Val]: mse: 10150571008.0, rmse: 100750.04222331621, mae 85711.2890625, r2: -3.407924334566512, nrmse: 0.1522759506944526\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort26 (Inference Stage)\n",
      "[Train]: mse: 19581865984.0, rmse: 139935.22067013723, mae 113505.7109375, r2: -0.64919048557855, nrmse: 0.14041676240470888\n",
      "[Val]: mse: 44227477504.0, rmse: 210303.29884240997, mae 136508.0625, r2: -5.966173256031466, nrmse: 0.22286851424106405\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort27 (Inference Stage)\n",
      "[Train]: mse: 46466494464.0, rmse: 215560.88342739738, mae 146200.25, r2: 0.18900549226502938, nrmse: 0.11884456629672241\n",
      "[Val]: mse: 23850971136.0, rmse: 154437.59625168995, mae 123822.5546875, r2: -0.7437896751402346, nrmse: 0.09600715915447854\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort28 (Inference Stage)\n",
      "[Train]: mse: 48943337472.0, rmse: 221231.41158524482, mae 166232.0, r2: -0.4639312453841582, nrmse: 0.16762981014205391\n",
      "[Val]: mse: 47036747776.0, rmse: 216879.56975243197, mae 157075.96875, r2: -0.667813584512964, nrmse: 0.16236542043330493\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort29 (Inference Stage)\n",
      "[Train]: mse: 12670283776.0, rmse: 112562.35505709713, mae 70889.046875, r2: -0.1481806300297115, nrmse: 0.20766894148589152\n",
      "[Val]: mse: 10083829760.0, rmse: 100418.27403416173, mae 82884.4609375, r2: -3.39075696939799, nrmse: 0.1977769144792157\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort30 (Inference Stage)\n",
      "[Train]: mse: 5368140288.0, rmse: 73267.593709634, mae 49452.828125, r2: -8.04866380502483, nrmse: 0.16750661818821425\n",
      "[Val]: mse: 6411479552.0, rmse: 80071.7150559422, mae 59988.81640625, r2: -36.750261476551756, nrmse: 0.18150914367792895\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort31 (Inference Stage)\n",
      "[Train]: mse: 13818121216.0, rmse: 117550.5049585071, mae 91425.6015625, r2: 0.20473836050278227, nrmse: 0.08492178313559033\n",
      "[Val]: mse: 36252475392.0, rmse: 190400.82823349274, mae 137321.3125, r2: -5.8403638501175665, nrmse: 0.13511749022020825\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort32 (Inference Stage)\n",
      "[Train]: mse: 22991355904.0, rmse: 151629.00746229265, mae 104944.53125, r2: -0.24015839629627922, nrmse: 0.1312981176155789\n",
      "[Val]: mse: 45014167552.0, rmse: 212165.4249683487, mae 143552.0625, r2: -14.52234729620633, nrmse: 0.18543021278315622\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort33 (Inference Stage)\n",
      "[Train]: mse: 3727497472.0, rmse: 61053.234738218416, mae 48816.5390625, r2: -4.777697278039981, nrmse: 0.23864078111908477\n",
      "[Val]: mse: 2650944000.0, rmse: 51487.318827066534, mae 43470.8359375, r2: -5.734662153573847, nrmse: 0.19317952890880724\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort34 (Inference Stage)\n",
      "[Train]: mse: 20632944640.0, rmse: 143641.72318654493, mae 95931.9921875, r2: -0.8638293183866872, nrmse: 0.15031975339080828\n",
      "[Val]: mse: 10337351680.0, rmse: 101672.76764207808, mae 82149.6953125, r2: -1.9371099679655264, nrmse: 0.10417631326951225\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort35 (Inference Stage)\n",
      "[Train]: mse: 34894512128.0, rmse: 186800.72839258416, mae 104624.96875, r2: 0.3339193924372731, nrmse: 0.1978671145768036\n",
      "[Val]: mse: 8350986752.0, rmse: 91383.73351970251, mae 73444.59375, r2: -0.7362828898914648, nrmse: 0.11950051729422745\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort36 (Inference Stage)\n",
      "[Train]: mse: 4395631104.0, rmse: 66299.55583561628, mae 49336.2734375, r2: -0.9956538250556144, nrmse: 0.1651855166414639\n",
      "[Val]: mse: 5045637632.0, rmse: 71032.65187222, mae 61860.828125, r2: -41.998251224716064, nrmse: 0.2210093442336893\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort37 (Inference Stage)\n",
      "[Train]: mse: 4946613760.0, rmse: 70332.16732050847, mae 54107.05859375, r2: -10.900643726429452, nrmse: 0.136603438837544\n",
      "[Val]: mse: 7633102336.0, rmse: 87367.62750584452, mae 66064.8046875, r2: -24.567241089993004, nrmse: 0.1648539309946133\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort38 (Inference Stage)\n",
      "[Train]: mse: 5569184256.0, rmse: 74626.96735095163, mae 53201.35546875, r2: -5.684356027512933, nrmse: 0.2045986669249316\n",
      "[Val]: mse: 2770975232.0, rmse: 52640.053495413544, mae 43909.78125, r2: -2.216219740871178, nrmse: 0.12111541801653525\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort39 (Inference Stage)\n",
      "[Train]: mse: 31719233536.0, rmse: 178098.94310747608, mae 117524.5859375, r2: 0.2624478997193971, nrmse: 0.12708120748376187\n",
      "[Val]: mse: 28895287296.0, rmse: 169986.13854076454, mae 135390.453125, r2: -2.4662658766410908, nrmse: 0.11533112300678137\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort40 (Inference Stage)\n",
      "[Train]: mse: 15782274048.0, rmse: 125627.52106127064, mae 87399.53125, r2: -0.22044195015284918, nrmse: 0.13124894624100963\n",
      "[Val]: mse: 17478430720.0, rmse: 132206.01620198681, mae 95796.078125, r2: -1.453142031129567, nrmse: 0.1448173837602273\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort41 (Inference Stage)\n",
      "[Train]: mse: 31227408384.0, rmse: 176712.78500436802, mae 123545.5703125, r2: 0.002178063126473284, nrmse: 0.14303559139409588\n",
      "[Val]: mse: 40458960896.0, rmse: 201144.12965831242, mae 162642.53125, r2: -4.033957131822105, nrmse: 0.16077321236156478\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort42 (Inference Stage)\n",
      "[Train]: mse: 6249181696.0, rmse: 79051.76592587923, mae 60397.5546875, r2: -1.8335748168163253, nrmse: 0.14476199496687106\n",
      "[Val]: mse: 10068831232.0, rmse: 100343.56597211403, mae 71781.0859375, r2: -4.027541407436449, nrmse: 0.16721889681714044\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort43 (Inference Stage)\n",
      "[Train]: mse: 6559638528.0, rmse: 80991.59541582078, mae 62960.703125, r2: -2.424665277010021, nrmse: 0.12738192815829205\n",
      "[Val]: mse: 21656768512.0, rmse: 147162.38823829952, mae 107641.2265625, r2: -30.196519820708787, nrmse: 0.23025502154672625\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort44 (Inference Stage)\n",
      "[Train]: mse: 5886613504.0, rmse: 76724.2693285508, mae 58955.82421875, r2: -21.650941491272857, nrmse: 0.2639012612190142\n",
      "[Val]: mse: 2862393344.0, rmse: 53501.33964677894, mae 41755.52734375, r2: -23.429247473346006, nrmse: 0.16744425905491098\n",
      "\n",
      "\n",
      "\n",
      "Final Prediction on Stort45 (Inference Stage)\n",
      "[Train]: mse: 22282995712.0, rmse: 149274.89980569406, mae 91769.34375, r2: -0.2933248739887595, nrmse: 0.19042438314673754\n",
      "[Val]: mse: 15244080128.0, rmse: 123466.91916460863, mae 86797.0859375, r2: -3.3314822286808434, nrmse: 0.1605167152043263\n",
      "\n",
      "\n",
      "总长度：45\n",
      "[Val]: val_mse_avg: 28335258624.0, val_rmse_avg: 152011.9906461965, val_mae_avg: 115689.5796875, val_r2_avg: -6.863694269425029, val_nrmse_avg: 0.16246280184404138\n",
      "\n",
      "\n",
      "[val_nrmse_all]: [0.09770657708343283, 0.1177805833721818, 0.2506787877220804, 0.09760734356982105, 0.23099489487233524, 0.0936706960573446, 0.3255131326506836, 0.1318215119933487, 0.12807475777327215, 0.120808239266044, 0.10857073707582089, 0.1341015760321956, 0.16732017840629398, 0.1751545592120429, 0.2122572377134555, 0.3534657731160299, 0.1471615892937456, 0.12987433255996655, 0.13375248384502064, 0.18281353403227338, 0.1701660869822184, 0.1095185509314907, 0.15804271928130145, 0.13042772998659063, 0.1522759506944526, 0.22286851424106405, 0.09600715915447854, 0.16236542043330493, 0.1977769144792157, 0.18150914367792895, 0.13511749022020825, 0.18543021278315622, 0.19317952890880724, 0.10417631326951225, 0.11950051729422745, 0.2210093442336893, 0.1648539309946133, 0.12111541801653525, 0.11533112300678137, 0.1448173837602273, 0.16077321236156478, 0.16721889681714044, 0.23025502154672625, 0.16744425905491098, 0.1605167152043263], \n"
     ]
    }
   ],
   "source": [
    "# 验证集合的画图\n",
    "def transform_preds(y_pred_train, y_pred_val):\n",
    "    if not isinstance(y_pred_train, np.ndarray):\n",
    "        y_pred_train = y_pred_train.cpu().numpy()\n",
    "    if not isinstance(y_pred_val, np.ndarray):\n",
    "        y_pred_val = y_pred_val.cpu().numpy()\n",
    "    return y_pred_train, y_pred_val\n",
    "\n",
    "\n",
    "def round_predictions(y_pred_train, y_pred_val, dims):\n",
    "    # round to closest integer\n",
    "    if dims is None or len(dims) == 0:\n",
    "        return y_pred_train, y_pred_val\n",
    "    for dim in dims:\n",
    "        y_pred_train[:, dim] = np.rint(y_pred_train[:, dim])\n",
    "        y_pred_val[:, dim] = np.rint(y_pred_val[:, dim])\n",
    "    return y_pred_train, y_pred_val\n",
    "\n",
    "\n",
    "def inverse_transform(y_train, y_val, y_pred_train, y_pred_val,\n",
    "                      y_scaler=None,\n",
    "                      round_preds=False, dims=None):\n",
    "    \"\"\"Bring targets and predictions back to the original target scale.\n",
    "\n",
    "    Predictions are coerced to numpy first; if a scaler is given, all four\n",
    "    arrays are inverse-transformed; negative predictions are then clipped\n",
    "    to zero in place, and the dimensions in `dims` optionally rounded.\n",
    "    \"\"\"\n",
    "    y_pred_train, y_pred_val = transform_preds(y_pred_train, y_pred_val)\n",
    "\n",
    "    if y_scaler is not None:\n",
    "        # Undo the target scaling applied during preprocessing.\n",
    "        y_train, y_val, y_pred_train, y_pred_val = [\n",
    "            y_scaler.inverse_transform(arr)\n",
    "            for arr in (y_train, y_val, y_pred_train, y_pred_val)\n",
    "        ]\n",
    "\n",
    "    # Clip negative predictions to zero (in place).\n",
    "    y_pred_train[y_pred_train < 0.] = 0.\n",
    "    y_pred_val[y_pred_val < 0.] = 0.\n",
    "\n",
    "    if round_preds:\n",
    "        y_pred_train, y_pred_val = round_predictions(y_pred_train, y_pred_val, dims)\n",
    "\n",
    "    return y_train, y_val, y_pred_train, y_pred_val\n",
    "\n",
    "\n",
    "def make_plot(y_true, y_pred,\n",
    "              title,\n",
    "              feature_names=None,\n",
    "              client=None):\n",
    "    \"\"\"Plot actual vs. predicted curves, one figure per target dimension.\"\"\"\n",
    "    n_dims = y_pred.shape[1]\n",
    "    if feature_names is None:\n",
    "        feature_names = [f\"feature_{i}\" for i in range(n_dims)]\n",
    "    assert len(feature_names) == n_dims\n",
    "\n",
    "    for i, name in enumerate(feature_names):\n",
    "        plt.figure(figsize=(8, 6))\n",
    "        plt.ticklabel_format(style='plain')\n",
    "        plt.plot(y_true[:, i], label=\"Actual\")\n",
    "        plt.plot(y_pred[:, i], label=\"Predicted\")\n",
    "        # Prefix the title with the client id when one is given.\n",
    "        header = f\"[{client} {title}]\" if client is not None else f\"[{title}]\"\n",
    "        plt.title(f\"{header} {name} prediction\")\n",
    "        plt.legend()\n",
    "        plt.show()\n",
    "        plt.close()\n",
    "\n",
    "\n",
    "### During the inference stage we inverse the transformations we applied during preprocessing\n",
    "def inference(\n",
    "        model,  # the global model\n",
    "        client_X_train,  # train data per client\n",
    "        client_y_train,\n",
    "        client_X_val,  # val data per client\n",
    "        client_y_val,\n",
    "        exogenous_data_train,  # exogenous data per client\n",
    "        exogenous_data_val,\n",
    "        y_scaler,  # the scaler used to transform the targets\n",
    "        idxs=[1],\n",
    "        apply_round=True,  # round to closest integer\n",
    "        round_dimensions=[0],  # the dimensions to apply rounding\n",
    "        plot=False,  # plot predictions\n",
    "):\n",
    "    \"\"\"Evaluate the global model on every client and print per-client and averaged val metrics.\n",
    "\n",
    "    For each client (the aggregated \"all\" entry is skipped), predictions on the\n",
    "    train and validation splits are inverse-transformed back to the original\n",
    "    scale, per-client metrics are printed, and the validation metrics are\n",
    "    averaged over all clients at the end.\n",
    "    \"\"\"\n",
    "    # load per client data to torch\n",
    "    train_loaders, val_loaders = [], []\n",
    "\n",
    "    # get data per client\n",
    "    for client in client_X_train:\n",
    "        if client == \"all\":\n",
    "            continue\n",
    "        if exogenous_data_train is not None:\n",
    "            tmp_exogenous_data_train = exogenous_data_train[client]\n",
    "            tmp_exogenous_data_val = exogenous_data_val[client]\n",
    "        else:\n",
    "            tmp_exogenous_data_train = None\n",
    "            tmp_exogenous_data_val = None\n",
    "\n",
    "        num_features = len(client_X_train[client][0][0])\n",
    "\n",
    "        # to torch loader\n",
    "        train_loaders.append(\n",
    "            to_torch_dataset(\n",
    "                client_X_train[client], client_y_train[client],\n",
    "                num_lags=args.num_lags,\n",
    "                num_features=num_features,\n",
    "                exogenous_data=tmp_exogenous_data_train,\n",
    "                indices=idxs,\n",
    "                batch_size=1,\n",
    "                shuffle=False\n",
    "            )\n",
    "        )\n",
    "        val_loaders.append(\n",
    "            to_torch_dataset(\n",
    "                client_X_val[client], client_y_val[client],\n",
    "                num_lags=args.num_lags,\n",
    "                # Fix: pass num_features here too, so the val loader is built\n",
    "                # consistently with the train loader above.\n",
    "                num_features=num_features,\n",
    "                exogenous_data=tmp_exogenous_data_val,\n",
    "                indices=idxs,\n",
    "                batch_size=1,\n",
    "                shuffle=False\n",
    "            )\n",
    "        )\n",
    "\n",
    "    # get client ids (exclude the aggregated \"all\" key)\n",
    "    cids = [k for k in client_X_train.keys() if k != \"all\"]\n",
    "\n",
    "    # predict per client using the global model; only the raw predictions are\n",
    "    # kept here — metrics are recomputed after the inverse transformation below\n",
    "    y_preds_train, y_preds_val = dict(), dict()\n",
    "    for cid, train_loader, val_loader in zip(cids, train_loaders, val_loaders):\n",
    "        print(f\"Prediction on {cid}\")\n",
    "        _, _, _, _, _, y_pred_train = test(\n",
    "            model, train_loader, None, device=device\n",
    "        )\n",
    "        _, _, _, _, _, y_pred_val = test(\n",
    "            model, val_loader, None, device=device\n",
    "        )\n",
    "        y_preds_train[cid] = y_pred_train\n",
    "        y_preds_val[cid] = y_pred_val\n",
    "\n",
    "    # running sums of the validation metrics (averaged over clients below)\n",
    "    val_mse_avg, val_rmse_avg, val_mae_avg, val_r2_avg, val_nrmse_avg = 0, 0, 0, 0, 0\n",
    "    # all per-client validation nrmse values\n",
    "    val_nrmse_all = []\n",
    "    for cid in cids:\n",
    "        y_train, y_val = client_y_train[cid], client_y_val[cid]\n",
    "        y_pred_train, y_pred_val = y_preds_train[cid], y_preds_val[cid]\n",
    "\n",
    "        # bring targets and predictions back to the original scale\n",
    "        y_train, y_val, y_pred_train, y_pred_val = inverse_transform(\n",
    "            y_train, y_val, y_pred_train, y_pred_val,\n",
    "            y_scaler, round_preds=apply_round, dims=round_dimensions\n",
    "        )\n",
    "        train_mse, train_rmse, train_mae, train_r2, train_nrmse, train_res_per_dim = accumulate_metric(\n",
    "            y_train, y_pred_train, True, return_all=True\n",
    "        )\n",
    "        val_mse, val_rmse, val_mae, val_r2, val_nrmse, val_res_per_dim = accumulate_metric(\n",
    "            y_val, y_pred_val, True, return_all=True\n",
    "        )\n",
    "\n",
    "        print(f\"\\nFinal Prediction on {cid} (Inference Stage)\")\n",
    "        print(f\"[Train]: mse: {train_mse}, \"\n",
    "              f\"rmse: {train_rmse}, mae {train_mae}, r2: {train_r2}, nrmse: {train_nrmse}\")\n",
    "        print(f\"[Val]: mse: {val_mse}, \"\n",
    "              f\"rmse: {val_rmse}, mae {val_mae}, r2: {val_r2}, nrmse: {val_nrmse}\\n\\n\")\n",
    "        val_mse_avg = val_mse_avg + val_mse\n",
    "        val_rmse_avg = val_rmse_avg + val_rmse\n",
    "        val_mae_avg = val_mae_avg + val_mae\n",
    "        val_r2_avg = val_r2_avg + val_r2\n",
    "        val_nrmse_avg = val_nrmse_avg + val_nrmse\n",
    "        val_nrmse_all.append(val_nrmse)\n",
    "        if plot:\n",
    "            make_plot(y_train, y_pred_train, title=\"Train\", feature_names=args.targets, client=cid)\n",
    "            make_plot(y_val, y_pred_val, title=\"Val\", feature_names=args.targets, client=cid)\n",
    "\n",
    "    # the printed label is Chinese for \"total length\" (number of clients)\n",
    "    print(\"总长度：\" + str(len(cids)))\n",
    "    num_clients = len(cids)\n",
    "    val_mse_avg = val_mse_avg / num_clients\n",
    "    val_rmse_avg = val_rmse_avg / num_clients\n",
    "    val_mae_avg = val_mae_avg / num_clients\n",
    "    val_r2_avg = val_r2_avg / num_clients\n",
    "    val_nrmse_avg = val_nrmse_avg / num_clients\n",
    "    print(f\"[Val]: val_mse_avg: {val_mse_avg}, \"\n",
    "          f\"val_rmse_avg: {val_rmse_avg}, val_mae_avg: {val_mae_avg}, val_r2_avg: {val_r2_avg}, val_nrmse_avg: {val_nrmse_avg}\\n\\n\")\n",
    "    print(f\"[val_nrmse_all]: {val_nrmse_all}, \")\n",
    "\n",
    "# Run the inference/reporting pass for every client with the trained global model.\n",
    "inference(\n",
    "    trained_model,\n",
    "    client_X_train,\n",
    "    client_y_train,\n",
    "    client_X_val,\n",
    "    client_y_val,\n",
    "    exogenous_data_train,\n",
    "    exogenous_data_val,\n",
    "    y_scaler\n",
    ")"
   ],
   "metadata": {
    "collapsed": false,
    "ExecuteTime": {
     "start_time": "2023-08-24T11:48:52.022935Z",
     "end_time": "2023-08-24T11:49:23.446671Z"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO logger 2023-08-24 11:49:23,607 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 23939196928.0, rmse: 154722.9683272655, mae: 118810.890625, r^2: -2.4203734123386855, nrmse: 0.09932302873080498\n",
      "INFO logger 2023-08-24 11:49:23,754 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 13116719104.0, rmse: 114528.24587847314, mae: 94931.5234375, r^2: -1.3446266461118443, nrmse: 0.06105293856935637\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Final Prediction in Stort1\n",
      "[Test]: mse: 23939196928.0, rmse: 154722.9683272655, mae 118810.890625, r2: -2.4203734123386855, nrmse: 0.09932302873080498\n",
      "\n",
      "\n",
      "Final Prediction in Stort2\n",
      "[Test]: mse: 13116719104.0, rmse: 114528.24587847314, mae 94931.5234375, r2: -1.3446266461118443, nrmse: 0.06105293856935637\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO logger 2023-08-24 11:49:23,901 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 8268451328.0, rmse: 90931.02511244443, mae: 79568.65625, r^2: -40.720653454204495, nrmse: 0.2210259069618412\n",
      "INFO logger 2023-08-24 11:49:24,041 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 27224330240.0, rmse: 164997.97041175992, mae: 140342.578125, r^2: -6.270712946076947, nrmse: 0.07711210691877492\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Final Prediction in Stort3\n",
      "[Test]: mse: 8268451328.0, rmse: 90931.02511244443, mae 79568.65625, r2: -40.720653454204495, nrmse: 0.2210259069618412\n",
      "\n",
      "\n",
      "Final Prediction in Stort4\n",
      "[Test]: mse: 27224330240.0, rmse: 164997.97041175992, mae 140342.578125, r2: -6.270712946076947, nrmse: 0.07711210691877492\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO logger 2023-08-24 11:49:24,170 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 10345932800.0, rmse: 101714.95858525431, mae: 78669.3984375, r^2: -49.16805001046513, nrmse: 0.31595300606061594\n",
      "INFO logger 2023-08-24 11:49:24,309 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 6753421312.0, rmse: 82179.20242981189, mae: 71091.171875, r^2: 0.5046004806547868, nrmse: 0.053474890531214646\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Final Prediction in Stort5\n",
      "[Test]: mse: 10345932800.0, rmse: 101714.95858525431, mae 78669.3984375, r2: -49.16805001046513, nrmse: 0.31595300606061594\n",
      "\n",
      "\n",
      "Final Prediction in Stort6\n",
      "[Test]: mse: 6753421312.0, rmse: 82179.20242981189, mae 71091.171875, r2: 0.5046004806547868, nrmse: 0.053474890531214646\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO logger 2023-08-24 11:49:24,441 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 9376078848.0, rmse: 96830.15464203287, mae: 82132.2734375, r^2: -0.211995418651874, nrmse: 0.1586168343244166\n",
      "INFO logger 2023-08-24 11:49:24,580 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 8371795968.0, rmse: 91497.51891718157, mae: 70738.09375, r^2: -5.539693775838757, nrmse: 0.1008967234537351\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Final Prediction in Stort7\n",
      "[Test]: mse: 9376078848.0, rmse: 96830.15464203287, mae 82132.2734375, r2: -0.211995418651874, nrmse: 0.1586168343244166\n",
      "\n",
      "\n",
      "Final Prediction in Stort8\n",
      "[Test]: mse: 8371795968.0, rmse: 91497.51891718157, mae 70738.09375, r2: -5.539693775838757, nrmse: 0.1008967234537351\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO logger 2023-08-24 11:49:24,720 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 8674939904.0, rmse: 93139.3574381958, mae: 79889.0859375, r^2: -11.496397543801233, nrmse: 0.17065644034978178\n",
      "INFO logger 2023-08-24 11:49:24,866 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 17262352384.0, rmse: 131386.2716725001, mae: 105502.0703125, r^2: -1.6852509827153836, nrmse: 0.07461175920854439\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Final Prediction in Stort9\n",
      "[Test]: mse: 8674939904.0, rmse: 93139.3574381958, mae 79889.0859375, r2: -11.496397543801233, nrmse: 0.17065644034978178\n",
      "\n",
      "\n",
      "Final Prediction in Stort10\n",
      "[Test]: mse: 17262352384.0, rmse: 131386.2716725001, mae 105502.0703125, r2: -1.6852509827153836, nrmse: 0.07461175920854439\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO logger 2023-08-24 11:49:25,014 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 13145390080.0, rmse: 114653.34744350032, mae: 101913.2265625, r^2: -1.4418710323448027, nrmse: 0.08605931253339627\n",
      "INFO logger 2023-08-24 11:49:25,145 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 4563082752.0, rmse: 67550.59401663319, mae: 56902.09765625, r^2: -1.0220044588890955, nrmse: 0.07016284897262323\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Final Prediction in Stort11\n",
      "[Test]: mse: 13145390080.0, rmse: 114653.34744350032, mae 101913.2265625, r2: -1.4418710323448027, nrmse: 0.08605931253339627\n",
      "\n",
      "\n",
      "Final Prediction in Stort12\n",
      "[Test]: mse: 4563082752.0, rmse: 67550.59401663319, mae 56902.09765625, r2: -1.0220044588890955, nrmse: 0.07016284897262323\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO logger 2023-08-24 11:49:25,291 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 15754390528.0, rmse: 125516.49504348024, mae: 106479.546875, r^2: -1.9285413686468544, nrmse: 0.06180859325373026\n",
      "INFO logger 2023-08-24 11:49:25,420 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 19479994368.0, rmse: 139570.7504027975, mae: 109468.7890625, r^2: -0.748025822141066, nrmse: 0.08544797254794159\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Final Prediction in Stort13\n",
      "[Test]: mse: 15754390528.0, rmse: 125516.49504348024, mae 106479.546875, r2: -1.9285413686468544, nrmse: 0.06180859325373026\n",
      "\n",
      "\n",
      "Final Prediction in Stort14\n",
      "[Test]: mse: 19479994368.0, rmse: 139570.7504027975, mae 109468.7890625, r2: -0.748025822141066, nrmse: 0.08544797254794159\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO logger 2023-08-24 11:49:25,556 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 2984838144.0, rmse: 54633.67225438905, mae: 43296.82421875, r^2: -1.5293214917271998, nrmse: 0.09387582758652359\n",
      "INFO logger 2023-08-24 11:49:25,696 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 8205878272.0, rmse: 90586.30289398061, mae: 68556.546875, r^2: -3.335924862424366, nrmse: 0.16796370877944602\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Final Prediction in Stort15\n",
      "[Test]: mse: 2984838144.0, rmse: 54633.67225438905, mae 43296.82421875, r2: -1.5293214917271998, nrmse: 0.09387582758652359\n",
      "\n",
      "\n",
      "Final Prediction in Stort16\n",
      "[Test]: mse: 8205878272.0, rmse: 90586.30289398061, mae 68556.546875, r2: -3.335924862424366, nrmse: 0.16796370877944602\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO logger 2023-08-24 11:49:25,852 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 9808998400.0, rmse: 99040.38772137354, mae: 75371.4453125, r^2: -0.13531045185000967, nrmse: 0.10355608680381795\n",
      "INFO logger 2023-08-24 11:49:25,988 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 8837199872.0, rmse: 94006.3820812183, mae: 76059.53125, r^2: -1.0663515844092535, nrmse: 0.0893914161360695\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Final Prediction in Stort17\n",
      "[Test]: mse: 9808998400.0, rmse: 99040.38772137354, mae 75371.4453125, r2: -0.13531045185000967, nrmse: 0.10355608680381795\n",
      "\n",
      "\n",
      "Final Prediction in Stort18\n",
      "[Test]: mse: 8837199872.0, rmse: 94006.3820812183, mae 76059.53125, r2: -1.0663515844092535, nrmse: 0.0893914161360695\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO logger 2023-08-24 11:49:26,126 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 10263842816.0, rmse: 101310.62538549448, mae: 80884.6484375, r^2: -0.3574098240390928, nrmse: 0.0729808668706167\n",
      "INFO logger 2023-08-24 11:49:26,267 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 37783588864.0, rmse: 194380.0114826625, mae: 135082.6875, r^2: -2.679729238070504, nrmse: 0.093582252006433\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Final Prediction in Stort19\n",
      "[Test]: mse: 10263842816.0, rmse: 101310.62538549448, mae 80884.6484375, r2: -0.3574098240390928, nrmse: 0.0729808668706167\n",
      "\n",
      "\n",
      "Final Prediction in Stort20\n",
      "[Test]: mse: 37783588864.0, rmse: 194380.0114826625, mae 135082.6875, r2: -2.679729238070504, nrmse: 0.093582252006433\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO logger 2023-08-24 11:49:26,396 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 9552701440.0, rmse: 97737.92222059972, mae: 75547.7109375, r^2: -3.6587864223609143, nrmse: 0.14227549700721406\n",
      "INFO logger 2023-08-24 11:49:26,540 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 7450268160.0, rmse: 86314.93590335337, mae: 66085.65625, r^2: -1.8480575228851563, nrmse: 0.08638501574737843\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Final Prediction in Stort21\n",
      "[Test]: mse: 9552701440.0, rmse: 97737.92222059972, mae 75547.7109375, r2: -3.6587864223609143, nrmse: 0.14227549700721406\n",
      "\n",
      "\n",
      "Final Prediction in Stort22\n",
      "[Test]: mse: 7450268160.0, rmse: 86314.93590335337, mae 66085.65625, r2: -1.8480575228851563, nrmse: 0.08638501574737843\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO logger 2023-08-24 11:49:26,672 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 18929508352.0, rmse: 137584.5498302771, mae: 109546.9921875, r^2: -0.810752476057566, nrmse: 0.0964324315557492\n",
      "INFO logger 2023-08-24 11:49:26,820 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 18382147584.0, rmse: 135580.77881469778, mae: 115569.953125, r^2: -0.7994994626730856, nrmse: 0.0984019777643855\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Final Prediction in Stort23\n",
      "[Test]: mse: 18929508352.0, rmse: 137584.5498302771, mae 109546.9921875, r2: -0.810752476057566, nrmse: 0.0964324315557492\n",
      "\n",
      "\n",
      "Final Prediction in Stort24\n",
      "[Test]: mse: 18382147584.0, rmse: 135580.77881469778, mae 115569.953125, r2: -0.7994994626730856, nrmse: 0.0984019777643855\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO logger 2023-08-24 11:49:26,954 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 8669713408.0, rmse: 93111.29581312892, mae: 72639.390625, r^2: -9.977857019066251, nrmse: 0.13302353631010572\n",
      "INFO logger 2023-08-24 11:49:27,087 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 4730739712.0, rmse: 68780.37301439997, mae: 48347.68359375, r^2: -0.3931199161388186, nrmse: 0.06584926186137441\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Final Prediction in Stort25\n",
      "[Test]: mse: 8669713408.0, rmse: 93111.29581312892, mae 72639.390625, r2: -9.977857019066251, nrmse: 0.13302353631010572\n",
      "\n",
      "\n",
      "Final Prediction in Stort26\n",
      "[Test]: mse: 4730739712.0, rmse: 68780.37301439997, mae 48347.68359375, r2: -0.3931199161388186, nrmse: 0.06584926186137441\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO logger 2023-08-24 11:49:27,227 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 8910679040.0, rmse: 94396.3931514335, mae: 71745.0234375, r^2: 0.4414708502858804, nrmse: 0.05530033102463214\n",
      "INFO logger 2023-08-24 11:49:27,380 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 21077217280.0, rmse: 145179.94792670233, mae: 105706.6484375, r^2: -0.36821140709426925, nrmse: 0.11752394804408511\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Final Prediction in Stort27\n",
      "[Test]: mse: 8910679040.0, rmse: 94396.3931514335, mae 71745.0234375, r2: 0.4414708502858804, nrmse: 0.05530033102463214\n",
      "\n",
      "\n",
      "Final Prediction in Stort28\n",
      "[Test]: mse: 21077217280.0, rmse: 145179.94792670233, mae 105706.6484375, r2: -0.36821140709426925, nrmse: 0.11752394804408511\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO logger 2023-08-24 11:49:27,536 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 4569482240.0, rmse: 67597.94553091092, mae: 55577.4765625, r^2: -1.5694995096135065, nrmse: 0.1306459419974754\n",
      "INFO logger 2023-08-24 11:49:27,669 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 10984833024.0, rmse: 104808.55415470629, mae: 76350.6875, r^2: -166.27979568459128, nrmse: 0.2426061286576006\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Final Prediction in Stort29\n",
      "[Test]: mse: 4569482240.0, rmse: 67597.94553091092, mae 55577.4765625, r2: -1.5694995096135065, nrmse: 0.1306459419974754\n",
      "\n",
      "\n",
      "Final Prediction in Stort30\n",
      "[Test]: mse: 10984833024.0, rmse: 104808.55415470629, mae 76350.6875, r2: -166.27979568459128, nrmse: 0.2426061286576006\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO logger 2023-08-24 11:49:27,807 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 13979922432.0, rmse: 118236.72201139542, mae: 99659.5625, r^2: -8.098538853909778, nrmse: 0.0863764775452704\n",
      "INFO logger 2023-08-24 11:49:27,942 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 11785206784.0, rmse: 108559.69226190723, mae: 95007.65625, r^2: -6.732097565506735, nrmse: 0.09142523234244744\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Final Prediction in Stort31\n",
      "[Test]: mse: 13979922432.0, rmse: 118236.72201139542, mae 99659.5625, r2: -8.098538853909778, nrmse: 0.0863764775452704\n",
      "\n",
      "\n",
      "Final Prediction in Stort32\n",
      "[Test]: mse: 11785206784.0, rmse: 108559.69226190723, mae 95007.65625, r2: -6.732097565506735, nrmse: 0.09142523234244744\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO logger 2023-08-24 11:49:28,102 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 8673247232.0, rmse: 93130.27022402544, mae: 76859.390625, r^2: -26.056969336956826, nrmse: 0.35341108338426447\n",
      "INFO logger 2023-08-24 11:49:28,232 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 4825327616.0, rmse: 69464.57813878955, mae: 54222.359375, r^2: -5.606532620085887, nrmse: 0.0724134955471745\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Final Prediction in Stort33\n",
      "[Test]: mse: 8673247232.0, rmse: 93130.27022402544, mae 76859.390625, r2: -26.056969336956826, nrmse: 0.35341108338426447\n",
      "\n",
      "\n",
      "Final Prediction in Stort34\n",
      "[Test]: mse: 4825327616.0, rmse: 69464.57813878955, mae 54222.359375, r2: -5.606532620085887, nrmse: 0.0724134955471745\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO logger 2023-08-24 11:49:28,369 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 5611044864.0, rmse: 74906.907985846, mae: 63860.39453125, r^2: -0.8222815663575127, nrmse: 0.08616215134279755\n",
      "INFO logger 2023-08-24 11:49:28,496 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 6476802560.0, rmse: 80478.58448059334, mae: 64855.84765625, r^2: -42.394139024134205, nrmse: 0.2753786210808372\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Final Prediction in Stort35\n",
      "[Test]: mse: 5611044864.0, rmse: 74906.907985846, mae 63860.39453125, r2: -0.8222815663575127, nrmse: 0.08616215134279755\n",
      "\n",
      "\n",
      "Final Prediction in Stort36\n",
      "[Test]: mse: 6476802560.0, rmse: 80478.58448059334, mae 64855.84765625, r2: -42.394139024134205, nrmse: 0.2753786210808372\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO logger 2023-08-24 11:49:28,625 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 4351974912.0, rmse: 65969.4998616785, mae: 54608.578125, r^2: -21.6637090187263, nrmse: 0.12644538250932902\n",
      "INFO logger 2023-08-24 11:49:28,753 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 4801261056.0, rmse: 69291.13259285058, mae: 54909.14453125, r^2: -6.053081451397815, nrmse: 0.16089244643841813\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Final Prediction in Stort37\n",
      "[Test]: mse: 4351974912.0, rmse: 65969.4998616785, mae 54608.578125, r2: -21.6637090187263, nrmse: 0.12644538250932902\n",
      "\n",
      "\n",
      "Final Prediction in Stort38\n",
      "[Test]: mse: 4801261056.0, rmse: 69291.13259285058, mae 54909.14453125, r2: -6.053081451397815, nrmse: 0.16089244643841813\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO logger 2023-08-24 11:49:28,903 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 10569256960.0, rmse: 102806.89159779124, mae: 81494.6328125, r^2: -0.4608823094543717, nrmse: 0.06510583562831034\n",
      "INFO logger 2023-08-24 11:49:29,047 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 9815575552.0, rmse: 99073.5865506039, mae: 83186.1171875, r^2: -0.8710413764825595, nrmse: 0.10045184233155413\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Final Prediction in Stort39\n",
      "[Test]: mse: 10569256960.0, rmse: 102806.89159779124, mae 81494.6328125, r2: -0.4608823094543717, nrmse: 0.06510583562831034\n",
      "\n",
      "\n",
      "Final Prediction in Stort40\n",
      "[Test]: mse: 9815575552.0, rmse: 99073.5865506039, mae 83186.1171875, r2: -0.8710413764825595, nrmse: 0.10045184233155413\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO logger 2023-08-24 11:49:29,186 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 13189861376.0, rmse: 114847.12175757824, mae: 94235.0, r^2: -1.3842356000269564, nrmse: 0.08301570453444677\n",
      "INFO logger 2023-08-24 11:49:29,314 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 5608547840.0, rmse: 74890.23861625759, mae: 58633.58984375, r^2: -2.1331864359324806, nrmse: 0.13392031148355651\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Final Prediction in Stort41\n",
      "[Test]: mse: 13189861376.0, rmse: 114847.12175757824, mae 94235.0, r2: -1.3842356000269564, nrmse: 0.08301570453444677\n",
      "\n",
      "\n",
      "Final Prediction in Stort42\n",
      "[Test]: mse: 5608547840.0, rmse: 74890.23861625759, mae 58633.58984375, r2: -2.1331864359324806, nrmse: 0.13392031148355651\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO logger 2023-08-24 11:49:29,441 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 5073491968.0, rmse: 71228.44914779488, mae: 59531.84765625, r^2: -6.880300339785043, nrmse: 0.1158478374692395\n",
      "INFO logger 2023-08-24 11:49:29,572 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 2172337152.0, rmse: 46608.33779486241, mae: 36620.16015625, r^2: -18.541195928752096, nrmse: 0.13731539432680973\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Final Prediction in Stort43\n",
      "[Test]: mse: 5073491968.0, rmse: 71228.44914779488, mae 59531.84765625, r2: -6.880300339785043, nrmse: 0.1158478374692395\n",
      "\n",
      "\n",
      "Final Prediction in Stort44\n",
      "[Test]: mse: 2172337152.0, rmse: 46608.33779486241, mae 36620.16015625, r2: -18.541195928752096, nrmse: 0.13731539432680973\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO logger 2023-08-24 11:49:29,703 | helpers.py:60 | Metrics for dimension: 0\n",
      "\tmse: 12602758144.0, rmse: 112262.00668080007, mae: 91324.609375, r^2: -11.712870925231364, nrmse: 0.1519225456258346\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Final Prediction in Stort45\n",
      "[Test]: mse: 12602758144.0, rmse: 112262.00668080007, mae 91324.609375, r2: -11.712870925231364, nrmse: 0.1519225456258346\n",
      "\n",
      "\n",
      "[Test]: test_mse_avg: 11043429546.666666, test_rmse_avg: 100800.51018229853, test_mae_avg 80929.27109375, test_r2_avg: -10.606062550378368, test_nrmse_avg: 0.121468465603555\n",
      "\n",
      "\n",
      "[test_nrmse_all]: [0.09932302873080498, 0.06105293856935637, 0.2210259069618412, 0.07711210691877492, 0.31595300606061594, 0.053474890531214646, 0.1586168343244166, 0.1008967234537351, 0.17065644034978178, 0.07461175920854439, 0.08605931253339627, 0.07016284897262323, 0.06180859325373026, 0.08544797254794159, 0.09387582758652359, 0.16796370877944602, 0.10355608680381795, 0.0893914161360695, 0.0729808668706167, 0.093582252006433, 0.14227549700721406, 0.08638501574737843, 0.0964324315557492, 0.0984019777643855, 0.13302353631010572, 0.06584926186137441, 0.05530033102463214, 0.11752394804408511, 0.1306459419974754, 0.2426061286576006, 0.0863764775452704, 0.09142523234244744, 0.35341108338426447, 0.0724134955471745, 0.08616215134279755, 0.2753786210808372, 0.12644538250932902, 0.16089244643841813, 0.06510583562831034, 0.10045184233155413, 0.08301570453444677, 0.13392031148355651, 0.1158478374692395, 0.13731539432680973, 0.1519225456258346], \n"
     ]
    }
   ],
   "source": [
    "# Test-set prediction\n",
    "def transform_preds_test(y_pred_test):\n",
    "    if not isinstance(y_pred_test, np.ndarray):\n",
    "        y_pred_test = y_pred_test.cpu().numpy()\n",
    "    return y_pred_test\n",
    "\n",
    "\n",
    "def round_predictions_test(y_pred_test, dims):\n",
    "    # round to closest integer\n",
    "    if dims is None or len(dims) == 0:\n",
    "        return y_pred_test\n",
    "    for dim in dims:\n",
    "        y_pred_test[:, dim] = np.rint(y_pred_test[:, dim])\n",
    "    return y_pred_test\n",
    "\n",
    "\n",
    "def inverse_transform_test(\n",
    "        y_test, y_pred_test,\n",
    "        y_scaler=None,\n",
    "        round_preds=False,\n",
    "        dims=None):\n",
    "    \"\"\"Map targets and predictions back to the original scale for evaluation.\n",
    "\n",
    "    Predictions are converted to numpy, both arrays are (optionally) inverse\n",
    "    scaled, negative predictions are clamped to zero in place, and the\n",
    "    dimensions in ``dims`` are rounded when ``round_preds`` is set.\n",
    "    \"\"\"\n",
    "    targets, preds = y_test, transform_preds_test(y_pred_test)\n",
    "\n",
    "    if y_scaler is not None:\n",
    "        targets = y_scaler.inverse_transform(targets)\n",
    "        preds = y_scaler.inverse_transform(preds)\n",
    "\n",
    "    # Negative forecasts are not meaningful here — clamp them to zero.\n",
    "    preds[preds < 0.] = 0.\n",
    "\n",
    "    return (targets, round_predictions_test(preds, dims)) if round_preds else (targets, preds)\n",
    "\n",
    "\n",
    "def predict(\n",
    "        model,\n",
    "        cid,\n",
    "        X_test,\n",
    "        y_test,\n",
    "        exogenous_data_test,\n",
    "        plot,\n",
    "        idxs=[1],\n",
    "        apply_round=True,\n",
    "        round_dimensions=[0]\n",
    "):\n",
    "    \"\"\"Evaluate ``model`` on a single client's test split and print its metrics.\n",
    "\n",
    "    Args:\n",
    "        model: the trained forecasting model.\n",
    "        cid: client id; ids of the form \"<name>_test\" are reduced to \"<name>\".\n",
    "        X_test: test features in timeseries representation.\n",
    "        y_test: (scaled) test targets.\n",
    "        exogenous_data_test: exogenous feature matrix for the client, or None.\n",
    "        plot: if True, plot predictions against targets via ``make_plot``.\n",
    "        idxs: exogenous-feature indices forwarded to ``to_torch_dataset``.\n",
    "        apply_round: round ``round_dimensions`` of the predictions to integers.\n",
    "        round_dimensions: output dimensions to round after inverse scaling.\n",
    "\n",
    "    Returns:\n",
    "        Tuple (mse, rmse, mae, r2, nrmse) on the inverse-transformed scale.\n",
    "\n",
    "    NOTE(review): reads the module-level ``args``, ``device`` and ``y_scaler``.\n",
    "    \"\"\"\n",
    "    # Strip the \"_test\" marker so reports show the plain station name.\n",
    "    if \"test\" in cid:\n",
    "        tmp_cid = cid.split(\"_\")\n",
    "        for s_t in tmp_cid:\n",
    "            if s_t != \"test\":\n",
    "                cid = s_t\n",
    "                break\n",
    "\n",
    "    num_features = len(X_test[0][0])\n",
    "\n",
    "    test_loader = to_torch_dataset(\n",
    "        X_test, y_test,\n",
    "        num_lags=args.num_lags,\n",
    "        num_features=num_features,\n",
    "        exogenous_data=exogenous_data_test,\n",
    "        indices=idxs,\n",
    "        batch_size=1,\n",
    "        shuffle=False\n",
    "    )\n",
    "    # test() also returns metrics, but they are computed on *scaled* data and\n",
    "    # would be misleading; keep only the raw predictions and recompute below.\n",
    "    *_, y_pred_test = test(\n",
    "        model, test_loader, None, device=device\n",
    "    )\n",
    "\n",
    "    y_test, y_pred_test = inverse_transform_test(\n",
    "        y_test, y_pred_test, y_scaler, round_preds=apply_round, dims=round_dimensions\n",
    "    )\n",
    "\n",
    "    # Metrics on the original (inverse-transformed) scale.\n",
    "    test_mse, test_rmse, test_mae, test_r2, test_nrmse, test_res_per_dim = accumulate_metric(\n",
    "        y_test, y_pred_test, log_per_output=True, return_all=True\n",
    "    )\n",
    "    print(f\"Final Prediction in {cid}\")\n",
    "    print(f\"[Test]: mse: {test_mse}, rmse: {test_rmse}, mae {test_mae}, \"\n",
    "          f\"r2: {test_r2}, nrmse: {test_nrmse}\\n\\n\")\n",
    "\n",
    "    if plot:\n",
    "        make_plot(y_test, y_pred_test, title=\"Test\", feature_names=args.targets, client=cid)\n",
    "    return test_mse, test_rmse, test_mae, test_r2, test_nrmse\n",
    "\n",
    "\n",
    "def model_test(\n",
    "        model,\n",
    "        data_paths,\n",
    "        x_scaler,\n",
    "        y_scaler,\n",
    "        plot=False,\n",
    "        idxs=[1],\n",
    "        round_predictions=False,\n",
    "        round_dimensions=[0, 3, 4],\n",
    "):\n",
    "    \"\"\"Pre-process each test CSV, run the model on it and report averaged metrics.\n",
    "\n",
    "    Args:\n",
    "        model: the trained forecasting model passed on to ``predict``.\n",
    "        data_paths: list of per-client test CSV file paths.\n",
    "        x_scaler: fitted feature scaler applied to X before prediction.\n",
    "        y_scaler: fitted target scaler applied to y before prediction.\n",
    "        plot: forwarded to ``predict`` to plot predictions vs. targets.\n",
    "        idxs: exogenous-feature indices forwarded to ``predict``.\n",
    "        round_predictions: NOTE(review): currently unused — ``predict`` is\n",
    "            called with its own defaults; confirm whether it should be forwarded.\n",
    "        round_dimensions: NOTE(review): currently unused (see above).\n",
    "\n",
    "    Prints per-client metrics (via ``predict``) and averages over all clients.\n",
    "    Relies on the module-level ``args`` for pre-processing settings.\n",
    "    NOTE(review): ``predict`` reads the module-level ``y_scaler`` for inverse\n",
    "    scaling, not this parameter — the two must refer to the same scaler.\n",
    "    \"\"\"\n",
    "    # In testing we should pre-process and post-process our data and\n",
    "    # instruct our model to perform predictions\n",
    "\n",
    "    # read the data\n",
    "    test_mse_avg, test_rmse_avg, test_mae_avg, test_r2_avg, test_nrmse_avg = 0, 0, 0, 0, 0\n",
    "    test_nrmse_all = []\n",
    "    for client in data_paths:\n",
    "        df = read_data(client)\n",
    "\n",
    "        # Derive the client id from the file name when the identifier column is\n",
    "        # absent. NOTE(review): if the column already exists, ``cid`` stays\n",
    "        # unbound here and the predict() call below may raise NameError —\n",
    "        # confirm the expected input format.\n",
    "        if args.identifier not in df.columns:\n",
    "            cid = os.path.splitext(os.path.basename(client))[0]\n",
    "            # if \"test\" in cid:\n",
    "            #     tmp_cid = cid.split(\"_\")\n",
    "            #     for s_t in tmp_cid:\n",
    "            #         if s_t != \"test\":\n",
    "            #             cid = s_t\n",
    "            #             break\n",
    "            df[args.identifier] = cid\n",
    "\n",
    "        test_data = df.copy()\n",
    "\n",
    "        # get X_test, y_test\n",
    "        X_test, y_test = to_Xy(test_data, targets=args.targets)\n",
    "\n",
    "        # Per-client scalers (disabled — the shared scalers passed in are used):\n",
    "        # y_scaler = y_scalers[cid]\n",
    "        # x_scaler = x_scalers[cid]\n",
    "        # scale features, targets\n",
    "        X_test = scale_features(X_test, scaler=x_scaler, per_area=False)\n",
    "        y_test = scale_features(y_test, scaler=y_scaler, per_area=False)\n",
    "\n",
    "        # generate time lags\n",
    "        X_test = generate_time_lags(X_test, args.num_lags)\n",
    "        y_test = generate_time_lags(y_test, args.num_lags, is_y=True)\n",
    "\n",
    "        # get datetime features (if specified)\n",
    "        date_time_df_test = time_to_feature(\n",
    "            X_test, args.use_time_features, identifier=args.identifier\n",
    "        )\n",
    "\n",
    "        # get statistics as features (if specified)\n",
    "        stats_df_test = assign_statistics(X_test, args.assign_stats, args.num_lags,\n",
    "                                          targets=args.targets, identifier=args.identifier)\n",
    "\n",
    "        # Combine exogenous frames, dropping columns duplicated by the concat.\n",
    "        if date_time_df_test is not None or stats_df_test is not None:\n",
    "            exogenous_data_test = pd.concat([date_time_df_test, stats_df_test], axis=1)\n",
    "            exogenous_data_test = exogenous_data_test.loc[:, ~exogenous_data_test.columns.duplicated()].copy()\n",
    "        else:\n",
    "            exogenous_data_test = None\n",
    "\n",
    "        # transform to numpy\n",
    "        if exogenous_data_test is not None:\n",
    "            exogenous_data_test = get_exogenous_data_by_area(\n",
    "                exogenous_data_test, identifier=args.identifier\n",
    "            )\n",
    "\n",
    "            # NOTE(review): this rebinds ``cid`` to the area key(s) of the\n",
    "            # exogenous dict; with exactly one area this matches the file id.\n",
    "            for cid in exogenous_data_test:\n",
    "                exogenous_data_test[cid] = exogenous_data_test[cid].to_numpy()\n",
    "\n",
    "        # remove identifiers\n",
    "        X_test, y_test = remove_identifiers(X_test, y_test)\n",
    "\n",
    "        num_features = len(X_test.columns) // args.num_lags\n",
    "\n",
    "        # to timeseries representation\n",
    "        X_test = to_timeseries_rep(X_test.to_numpy(), args.num_lags, num_features=num_features)\n",
    "\n",
    "        y_test = y_test.to_numpy()\n",
    "\n",
    "        # Each test file holds exactly one client; unwrap its exogenous array.\n",
    "        if exogenous_data_test is not None:\n",
    "            assert len(exogenous_data_test) == 1\n",
    "            exogenous_data_test = exogenous_data_test[next(iter(exogenous_data_test))]\n",
    "\n",
    "        # make predictions\n",
    "        test_mse, test_rmse, test_mae, test_r2, test_nrmse = predict(model, cid, X_test, y_test, exogenous_data_test,\n",
    "                                                                     plot, idxs)\n",
    "        test_mse_avg = test_mse_avg + test_mse\n",
    "        test_rmse_avg = test_rmse_avg + test_rmse\n",
    "        test_mae_avg = test_mae_avg + test_mae\n",
    "        test_r2_avg = test_r2_avg + test_r2\n",
    "        test_nrmse_avg = test_nrmse_avg + test_nrmse\n",
    "        test_nrmse_all.append(test_nrmse)\n",
    "    # Averages over all clients.\n",
    "    print(\n",
    "        f\"[Test]: test_mse_avg: {test_mse_avg / len(data_paths)}, test_rmse_avg: {test_rmse_avg / len(data_paths)}, test_mae_avg {test_mae_avg / len(data_paths)}, \"f\"test_r2_avg: {test_r2_avg / len(data_paths)}, test_nrmse_avg: {test_nrmse_avg / len(data_paths)}\\n\\n\")\n",
    "    print(f\"[test_nrmse_all]: {test_nrmse_all}, \")\n",
    "\n",
    "\n",
    "# Test CSVs for base stations Stort1..Stort45.\n",
    "# Generated from the common filename pattern instead of hand-listing 45 paths,\n",
    "# which removes the copy-paste error surface (same list as before, in order).\n",
    "test_data_paths = [\n",
    "    f\"../mydatase/marketData/Stort{i}_test.csv\" for i in range(1, 46)\n",
    "]\n",
    "model_test(trained_model,\n",
    "           test_data_paths,\n",
    "           x_scaler, y_scaler,\n",
    "           round_predictions=True,\n",
    "           )"
   ],
   "metadata": {
    "collapsed": false,
    "ExecuteTime": {
     "start_time": "2023-08-24T11:49:23.447672Z",
     "end_time": "2023-08-24T11:49:29.717219Z"
    }
   }
  },
  {
   "cell_type": "markdown",
   "source": [],
   "metadata": {
    "collapsed": false
   }
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3.9.10 64-bit",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.9.10"
  },
  "vscode": {
   "interpreter": {
    "hash": "a39106e1a9d6d153b7400628e7589ff266b5caee5b0db427f0903be982155882"
   }
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
