{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "<i>Copyright (c) Microsoft Corporation. All rights reserved.</i>\n",
    "\n",
    "<i>Licensed under the MIT License.</i>"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Wide and Deep Model for Movie Recommendation\n",
    "\n",
    "<br>\n",
    "\n",
    "A linear model with a wide set of crossed-column (co-occurrence) features can memorize the feature interactions, while deep neural networks (DNN) can generalize the feature patterns through low-dimensional dense embeddings learned for the sparse features. [**Wide-and-deep**](https://arxiv.org/abs/1606.07792) learning jointly trains wide linear model and deep neural networks to combine the benefits of memorization and generalization for recommender systems.\n",
    "\n",
     "This notebook shows how to build and test the wide-and-deep model using [TensorFlow high-level Estimator API](https://www.tensorflow.org/api_docs/python/tf/estimator/DNNLinearCombinedRegressor). With the [movie recommendation dataset](https://grouplens.org/datasets/movielens/), we quickly demonstrate the following topics:\n",
     "1. How to prepare data\n",
     "2. How to build the model\n",
     "3. How to use a log hook to estimate performance while training\n",
     "4. How to test and export the model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "from tensorboard.notebook import display\n",
    "%reload_ext autoreload\n",
    "%autoreload 2\n",
    "%matplotlib inline"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "collapsed": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Tensorflow Version: 2.8.0\n",
      "GPUs:\n",
      " []\n"
     ]
    }
   ],
   "source": [
     "# Standard-library imports\n",
     "import itertools\n",
     "import math\n",
     "import os\n",
     "from tempfile import TemporaryDirectory\n",
     "\n",
     "# Third-party imports\n",
     "import numpy as np\n",
     "import scrapbook as sb\n",
     "import pandas as pd\n",
     "import sklearn.preprocessing\n",
     "import tensorflow as tf\n",
     "tf.get_logger().setLevel('ERROR') # only show error messages\n",
     "\n",
     "# Recommenders utilities: shared column-name constants, TF helpers, data loaders,\n",
     "# splitters, evaluation metrics, and the wide-and-deep model builders\n",
     "from recommenders.utils.constants import (\n",
     "    DEFAULT_USER_COL as USER_COL,\n",
     "    DEFAULT_ITEM_COL as ITEM_COL,\n",
     "    DEFAULT_RATING_COL as RATING_COL,\n",
     "    DEFAULT_PREDICTION_COL as PREDICT_COL,\n",
     "    SEED\n",
     ")\n",
     "from recommenders.utils import tf_utils, gpu_utils, plot\n",
     "from recommenders.datasets import movielens\n",
     "from recommenders.datasets.pandas_df_utils import user_item_pairs\n",
     "from recommenders.datasets.python_splitters import python_random_split\n",
     "import recommenders.evaluation.python_evaluation as evaluator\n",
     "import recommenders.models.wide_deep.wide_deep_utils as wide_deep\n",
     "\n",
     "print(\"Tensorflow Version:\", tf.__version__)\n",
     "print(\"GPUs:\\n\", gpu_utils.get_gpu_info())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {
    "collapsed": true,
    "tags": [
     "parameters"
    ]
   },
   "outputs": [],
   "source": [
     "\"\"\"Parameters (papermill)\"\"\"\n",
     "\n",
     "# Recommend top k items\n",
     "TOP_K = 10\n",
     "# Select MovieLens data size: 100k, 1m, 10m, or 20m\n",
     "MOVIELENS_DATA_SIZE = '100k'\n",
     "# Metrics to use for evaluation. Metric names are taken from the evaluator\n",
     "# module's function names (__name__) so they stay in sync with the functions\n",
     "# looked up later via evaluator.metrics[m].\n",
     "RANKING_METRICS = [\n",
     "    evaluator.ndcg_at_k.__name__,\n",
     "    evaluator.precision_at_k.__name__,\n",
     "]\n",
     "RATING_METRICS = [\n",
     "    evaluator.rmse.__name__,\n",
     "    evaluator.mae.__name__,\n",
     "]\n",
     "# Use session hook to evaluate model while training\n",
     "EVALUATE_WHILE_TRAINING = True\n",
     "# Item feature column name\n",
     "ITEM_FEAT_COL = 'genres'\n",
     "\n",
     "RANDOM_SEED = SEED  # Set seed for deterministic result\n",
     "\n",
     "# Train and test set pickle file paths. If provided, use them. Otherwise, download the MovieLens dataset.\n",
     "DATA_DIR = None\n",
     "TRAIN_PICKLE_PATH = None\n",
     "TEST_PICKLE_PATH = None\n",
     "EXPORT_DIR_BASE = './outputs/model'\n",
     "# Model checkpoints directory. If None, use temp-dir.\n",
     "MODEL_DIR = None\n",
     "\n",
     "#### Hyperparameters\n",
     "MODEL_TYPE = 'wide_deep'\n",
     "STEPS = 50000  # Number of batches to train\n",
     "BATCH_SIZE = 32\n",
     "# Wide (linear) model hyperparameters\n",
     "LINEAR_OPTIMIZER = 'adagrad'\n",
     "LINEAR_OPTIMIZER_LR = 0.0621  # Learning rate\n",
     "LINEAR_L1_REG = 0.0           # Regularization rate for FtrlOptimizer\n",
     "LINEAR_L2_REG = 0.0\n",
     "LINEAR_MOMENTUM = 0.0         # Momentum for MomentumOptimizer or RMSPropOptimizer\n",
     "# DNN model hyperparameters\n",
     "DNN_OPTIMIZER = 'adadelta'\n",
     "DNN_OPTIMIZER_LR = 0.1\n",
     "DNN_L1_REG = 0.0           # Regularization rate for FtrlOptimizer\n",
     "DNN_L2_REG = 0.0\n",
     "DNN_MOMENTUM = 0.0         # Momentum for MomentumOptimizer or RMSPropOptimizer\n",
     "# Layer dimensions. Defined as follows to make this notebook runnable from Hyperparameter tuning services like AzureML Hyperdrive\n",
     "DNN_HIDDEN_LAYER_1 = 0     # Set 0 to not use this layer\n",
     "DNN_HIDDEN_LAYER_2 = 64    # Set 0 to not use this layer\n",
     "DNN_HIDDEN_LAYER_3 = 128   # Set 0 to not use this layer\n",
     "DNN_HIDDEN_LAYER_4 = 512   # Note, at least one layer should have nodes.\n",
     "DNN_HIDDEN_UNITS = [h for h in [DNN_HIDDEN_LAYER_1, DNN_HIDDEN_LAYER_2, DNN_HIDDEN_LAYER_3, DNN_HIDDEN_LAYER_4] if h > 0]\n",
     "DNN_USER_DIM = 32          # User embedding feature dimension\n",
     "DNN_DROPOUT = 0.8          # NOTE(review): 0.8 is unusually high if this is the drop probability — confirm semantics in wide_deep.build_model\n",
     "DNN_BATCH_NORM = 1         # 1 to use batch normalization, 0 if not."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "# Resolve the checkpoint directory: fall back to a temporary directory when\n",
     "# MODEL_DIR is unset; otherwise require MODEL_DIR to be empty so stale\n",
     "# checkpoints from a previous run cannot be silently reused.\n",
     "if MODEL_DIR is None:\n",
     "    TMP_DIR = TemporaryDirectory()\n",
     "    model_dir = TMP_DIR.name\n",
     "else:\n",
     "    if os.path.exists(MODEL_DIR) and os.listdir(MODEL_DIR):\n",
     "        raise ValueError(\n",
     "            \"Model exists in {}. Use different directory name or \"\n",
     "            \"remove the existing checkpoint files first\".format(MODEL_DIR)\n",
     "        )\n",
     "    # Keep a reference so the temp dir (if any) is only cleaned up later\n",
     "    TMP_DIR = None\n",
     "    model_dir = MODEL_DIR"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 1. Prepare Data\n",
    "\n",
    "#### 1.1 Movie Rating and Genres Data\n",
     "First, download [MovieLens](https://grouplens.org/datasets/movielens/) data. Movies in the data set are tagged with one or more genres, out of 19 genres in total including '*unknown*'. We load the *movie genres* to use them as item features."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Problem downloading http://files.grouplens.org/datasets/movielens/ml-100k.zip\n",
      "Problem downloading http://files.grouplens.org/datasets/movielens/ml-100k.zip\n",
      "Problem downloading http://files.grouplens.org/datasets/movielens/ml-100k.zip\n",
      "Problem downloading http://files.grouplens.org/datasets/movielens/ml-100k.zip\n",
      "100%|██████████| 4.81k/4.81k [00:04<00:00, 1.07kKB/s]\n"
     ]
    },
    {
     "ename": "TypeError",
     "evalue": "%d format: a number is required, not DataFrame",
     "output_type": "error",
     "traceback": [
      "\u001B[0;31m---------------------------------------------------------------------------\u001B[0m",
      "\u001B[0;31mTypeError\u001B[0m                                 Traceback (most recent call last)",
      "Input \u001B[0;32mIn [21]\u001B[0m, in \u001B[0;36m<cell line: 2>\u001B[0;34m()\u001B[0m\n\u001B[1;32m      2\u001B[0m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m use_preset:\n\u001B[1;32m      3\u001B[0m     \u001B[38;5;66;03m# The genres of each movie are returned as '|' separated string, e.g. \"Animation|Children's|Comedy\".\u001B[39;00m\n\u001B[1;32m      4\u001B[0m     data \u001B[38;5;241m=\u001B[39m movielens\u001B[38;5;241m.\u001B[39mload_pandas_df(\n\u001B[1;32m      5\u001B[0m         size\u001B[38;5;241m=\u001B[39mMOVIELENS_DATA_SIZE,\n\u001B[1;32m      6\u001B[0m         header\u001B[38;5;241m=\u001B[39m[USER_COL, ITEM_COL, RATING_COL],\n\u001B[1;32m      7\u001B[0m         genres_col\u001B[38;5;241m=\u001B[39mITEM_FEAT_COL\n\u001B[1;32m      8\u001B[0m     )\n\u001B[0;32m----> 9\u001B[0m     \u001B[43mdisplay\u001B[49m\u001B[43m(\u001B[49m\u001B[43mdata\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mhead\u001B[49m\u001B[43m(\u001B[49m\u001B[43m)\u001B[49m\u001B[43m)\u001B[49m\n",
      "File \u001B[0;32m/usr/local/anaconda/anaconda3/envs/DeepLearning/lib/python3.8/site-packages/tensorboard/notebook.py:258\u001B[0m, in \u001B[0;36mdisplay\u001B[0;34m(port, height)\u001B[0m\n\u001B[1;32m    247\u001B[0m \u001B[38;5;28;01mdef\u001B[39;00m \u001B[38;5;21mdisplay\u001B[39m(port\u001B[38;5;241m=\u001B[39m\u001B[38;5;28;01mNone\u001B[39;00m, height\u001B[38;5;241m=\u001B[39m\u001B[38;5;28;01mNone\u001B[39;00m):\n\u001B[1;32m    248\u001B[0m     \u001B[38;5;124;03m\"\"\"Display a TensorBoard instance already running on this machine.\u001B[39;00m\n\u001B[1;32m    249\u001B[0m \n\u001B[1;32m    250\u001B[0m \u001B[38;5;124;03m    Args:\u001B[39;00m\n\u001B[0;32m   (...)\u001B[0m\n\u001B[1;32m    256\u001B[0m \u001B[38;5;124;03m        (currently 800).\u001B[39;00m\n\u001B[1;32m    257\u001B[0m \u001B[38;5;124;03m    \"\"\"\u001B[39;00m\n\u001B[0;32m--> 258\u001B[0m     \u001B[43m_display\u001B[49m\u001B[43m(\u001B[49m\u001B[43mport\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mport\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mheight\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mheight\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mprint_message\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[38;5;28;43;01mTrue\u001B[39;49;00m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mdisplay_handle\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[38;5;28;43;01mNone\u001B[39;49;00m\u001B[43m)\u001B[49m\n",
      "File \u001B[0;32m/usr/local/anaconda/anaconda3/envs/DeepLearning/lib/python3.8/site-packages/tensorboard/notebook.py:310\u001B[0m, in \u001B[0;36m_display\u001B[0;34m(port, height, print_message, display_handle)\u001B[0m\n\u001B[1;32m    303\u001B[0m         \u001B[38;5;28;01mpass\u001B[39;00m\n\u001B[1;32m    305\u001B[0m fn \u001B[38;5;241m=\u001B[39m {\n\u001B[1;32m    306\u001B[0m     _CONTEXT_COLAB: _display_colab,\n\u001B[1;32m    307\u001B[0m     _CONTEXT_IPYTHON: _display_ipython,\n\u001B[1;32m    308\u001B[0m     _CONTEXT_NONE: _display_cli,\n\u001B[1;32m    309\u001B[0m }[_get_context()]\n\u001B[0;32m--> 310\u001B[0m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[43mfn\u001B[49m\u001B[43m(\u001B[49m\u001B[43mport\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mport\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mheight\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mheight\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mdisplay_handle\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mdisplay_handle\u001B[49m\u001B[43m)\u001B[49m\n",
      "File \u001B[0;32m/usr/local/anaconda/anaconda3/envs/DeepLearning/lib/python3.8/site-packages/tensorboard/notebook.py:397\u001B[0m, in \u001B[0;36m_display_ipython\u001B[0;34m(port, height, display_handle)\u001B[0m\n\u001B[1;32m    385\u001B[0m     replacements \u001B[38;5;241m=\u001B[39m [\n\u001B[1;32m    386\u001B[0m         (\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124m%\u001B[39m\u001B[38;5;124mHTML_ID\u001B[39m\u001B[38;5;124m%\u001B[39m\u001B[38;5;124m\"\u001B[39m, html\u001B[38;5;241m.\u001B[39mescape(frame_id, quote\u001B[38;5;241m=\u001B[39m\u001B[38;5;28;01mTrue\u001B[39;00m)),\n\u001B[1;32m    387\u001B[0m         (\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124m%\u001B[39m\u001B[38;5;124mJSON_ID\u001B[39m\u001B[38;5;124m%\u001B[39m\u001B[38;5;124m\"\u001B[39m, json\u001B[38;5;241m.\u001B[39mdumps(frame_id)),\n\u001B[0;32m   (...)\u001B[0m\n\u001B[1;32m    390\u001B[0m         (\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124m%\u001B[39m\u001B[38;5;124mURL\u001B[39m\u001B[38;5;124m%\u001B[39m\u001B[38;5;124m\"\u001B[39m, json\u001B[38;5;241m.\u001B[39mdumps(proxy_url)),\n\u001B[1;32m    391\u001B[0m     ]\n\u001B[1;32m    392\u001B[0m \u001B[38;5;28;01melse\u001B[39;00m:\n\u001B[1;32m    393\u001B[0m     replacements \u001B[38;5;241m=\u001B[39m [\n\u001B[1;32m    394\u001B[0m         (\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124m%\u001B[39m\u001B[38;5;124mHTML_ID\u001B[39m\u001B[38;5;124m%\u001B[39m\u001B[38;5;124m\"\u001B[39m, html\u001B[38;5;241m.\u001B[39mescape(frame_id, quote\u001B[38;5;241m=\u001B[39m\u001B[38;5;28;01mTrue\u001B[39;00m)),\n\u001B[1;32m    395\u001B[0m         (\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124m%\u001B[39m\u001B[38;5;124mJSON_ID\u001B[39m\u001B[38;5;124m%\u001B[39m\u001B[38;5;124m\"\u001B[39m, json\u001B[38;5;241m.\u001B[39mdumps(frame_id)),\n\u001B[1;32m    396\u001B[0m         (\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124m%\u001B[39m\u001B[38;5;124mHEIGHT\u001B[39m\u001B[38;5;124m%\u001B[39m\u001B[38;5;124m\"\u001B[39m, 
\u001B[38;5;124m\"\u001B[39m\u001B[38;5;132;01m%d\u001B[39;00m\u001B[38;5;124m\"\u001B[39m \u001B[38;5;241m%\u001B[39m height),\n\u001B[0;32m--> 397\u001B[0m         (\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124m%\u001B[39m\u001B[38;5;124mPORT\u001B[39m\u001B[38;5;124m%\u001B[39m\u001B[38;5;124m\"\u001B[39m, \u001B[38;5;124;43m\"\u001B[39;49m\u001B[38;5;132;43;01m%d\u001B[39;49;00m\u001B[38;5;124;43m\"\u001B[39;49m\u001B[43m \u001B[49m\u001B[38;5;241;43m%\u001B[39;49m\u001B[43m \u001B[49m\u001B[43mport\u001B[49m),\n\u001B[1;32m    398\u001B[0m         (\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124m%\u001B[39m\u001B[38;5;124mURL\u001B[39m\u001B[38;5;124m%\u001B[39m\u001B[38;5;124m\"\u001B[39m, json\u001B[38;5;241m.\u001B[39mdumps(\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124m/\u001B[39m\u001B[38;5;124m\"\u001B[39m)),\n\u001B[1;32m    399\u001B[0m     ]\n\u001B[1;32m    401\u001B[0m \u001B[38;5;28;01mfor\u001B[39;00m (k, v) \u001B[38;5;129;01min\u001B[39;00m replacements:\n\u001B[1;32m    402\u001B[0m     shell \u001B[38;5;241m=\u001B[39m shell\u001B[38;5;241m.\u001B[39mreplace(k, v)\n",
      "\u001B[0;31mTypeError\u001B[0m: %d format: a number is required, not DataFrame"
     ]
    }
   ],
   "source": [
     "# Use the pre-split pickled train/test sets when both paths are given;\n",
     "# otherwise download the MovieLens ratings together with the genre column.\n",
     "use_preset = (TRAIN_PICKLE_PATH is not None and TEST_PICKLE_PATH is not None)\n",
     "if not use_preset:\n",
     "    # The genres of each movie are returned as '|' separated string, e.g. \"Animation|Children's|Comedy\".\n",
     "    data = movielens.load_pandas_df(\n",
     "        size=MOVIELENS_DATA_SIZE,\n",
     "        header=[USER_COL, ITEM_COL, RATING_COL],\n",
     "        genres_col=ITEM_FEAT_COL\n",
     "    )\n",
     "    display(data.head())"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### 1.2 Encode Item Features (Genres)\n",
     "To use genres in our model, we multi-hot-encode them with scikit-learn's [MultiLabelBinarizer](https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.MultiLabelBinarizer.html).\n",
    "\n",
    "For example, *Movie id=2355* has three genres, *Animation|Children's|Comedy*, which are being converted into an integer array of the indicator value for each genre like `[0, 0, 1, 1, 1, 0, 0, 0, ...]`. In the later step, we convert this into a float array and feed into the model.\n",
    "\n",
    "> For faster feature encoding, you may load ratings and items separately (by using `movielens.load_item_df`), encode the item-features, then combine the rating and item dataframes by using join-operation. "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Genres: ['Action' 'Adventure' 'Animation' \"Children's\" 'Comedy' 'Crime'\n",
      " 'Documentary' 'Drama' 'Fantasy' 'Film-Noir' 'Horror' 'Musical' 'Mystery'\n",
      " 'Romance' 'Sci-Fi' 'Thriller' 'War' 'Western' 'unknown']\n"
     ]
    },
    {
     "ename": "TypeError",
     "evalue": "%d format: a number is required, not DataFrame",
     "output_type": "error",
     "traceback": [
      "\u001B[0;31m---------------------------------------------------------------------------\u001B[0m",
      "\u001B[0;31mTypeError\u001B[0m                                 Traceback (most recent call last)",
      "Input \u001B[0;32mIn [22]\u001B[0m, in \u001B[0;36m<cell line: 1>\u001B[0;34m()\u001B[0m\n\u001B[1;32m      4\u001B[0m data[ITEM_FEAT_COL] \u001B[38;5;241m=\u001B[39m genres_encoder\u001B[38;5;241m.\u001B[39mfit_transform(\n\u001B[1;32m      5\u001B[0m     data[ITEM_FEAT_COL]\u001B[38;5;241m.\u001B[39mapply(\u001B[38;5;28;01mlambda\u001B[39;00m s: s\u001B[38;5;241m.\u001B[39msplit(\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124m|\u001B[39m\u001B[38;5;124m\"\u001B[39m))\n\u001B[1;32m      6\u001B[0m )\u001B[38;5;241m.\u001B[39mtolist()\n\u001B[1;32m      7\u001B[0m \u001B[38;5;28mprint\u001B[39m(\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124mGenres:\u001B[39m\u001B[38;5;124m\"\u001B[39m, genres_encoder\u001B[38;5;241m.\u001B[39mclasses_)\n\u001B[0;32m----> 8\u001B[0m \u001B[43mdisplay\u001B[49m\u001B[43m(\u001B[49m\u001B[43mdata\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mhead\u001B[49m\u001B[43m(\u001B[49m\u001B[43m)\u001B[49m\u001B[43m)\u001B[49m\n",
      "File \u001B[0;32m/usr/local/anaconda/anaconda3/envs/DeepLearning/lib/python3.8/site-packages/tensorboard/notebook.py:258\u001B[0m, in \u001B[0;36mdisplay\u001B[0;34m(port, height)\u001B[0m\n\u001B[1;32m    247\u001B[0m \u001B[38;5;28;01mdef\u001B[39;00m \u001B[38;5;21mdisplay\u001B[39m(port\u001B[38;5;241m=\u001B[39m\u001B[38;5;28;01mNone\u001B[39;00m, height\u001B[38;5;241m=\u001B[39m\u001B[38;5;28;01mNone\u001B[39;00m):\n\u001B[1;32m    248\u001B[0m     \u001B[38;5;124;03m\"\"\"Display a TensorBoard instance already running on this machine.\u001B[39;00m\n\u001B[1;32m    249\u001B[0m \n\u001B[1;32m    250\u001B[0m \u001B[38;5;124;03m    Args:\u001B[39;00m\n\u001B[0;32m   (...)\u001B[0m\n\u001B[1;32m    256\u001B[0m \u001B[38;5;124;03m        (currently 800).\u001B[39;00m\n\u001B[1;32m    257\u001B[0m \u001B[38;5;124;03m    \"\"\"\u001B[39;00m\n\u001B[0;32m--> 258\u001B[0m     \u001B[43m_display\u001B[49m\u001B[43m(\u001B[49m\u001B[43mport\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mport\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mheight\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mheight\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mprint_message\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[38;5;28;43;01mTrue\u001B[39;49;00m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mdisplay_handle\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[38;5;28;43;01mNone\u001B[39;49;00m\u001B[43m)\u001B[49m\n",
      "File \u001B[0;32m/usr/local/anaconda/anaconda3/envs/DeepLearning/lib/python3.8/site-packages/tensorboard/notebook.py:310\u001B[0m, in \u001B[0;36m_display\u001B[0;34m(port, height, print_message, display_handle)\u001B[0m\n\u001B[1;32m    303\u001B[0m         \u001B[38;5;28;01mpass\u001B[39;00m\n\u001B[1;32m    305\u001B[0m fn \u001B[38;5;241m=\u001B[39m {\n\u001B[1;32m    306\u001B[0m     _CONTEXT_COLAB: _display_colab,\n\u001B[1;32m    307\u001B[0m     _CONTEXT_IPYTHON: _display_ipython,\n\u001B[1;32m    308\u001B[0m     _CONTEXT_NONE: _display_cli,\n\u001B[1;32m    309\u001B[0m }[_get_context()]\n\u001B[0;32m--> 310\u001B[0m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[43mfn\u001B[49m\u001B[43m(\u001B[49m\u001B[43mport\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mport\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mheight\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mheight\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mdisplay_handle\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mdisplay_handle\u001B[49m\u001B[43m)\u001B[49m\n",
      "File \u001B[0;32m/usr/local/anaconda/anaconda3/envs/DeepLearning/lib/python3.8/site-packages/tensorboard/notebook.py:397\u001B[0m, in \u001B[0;36m_display_ipython\u001B[0;34m(port, height, display_handle)\u001B[0m\n\u001B[1;32m    385\u001B[0m     replacements \u001B[38;5;241m=\u001B[39m [\n\u001B[1;32m    386\u001B[0m         (\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124m%\u001B[39m\u001B[38;5;124mHTML_ID\u001B[39m\u001B[38;5;124m%\u001B[39m\u001B[38;5;124m\"\u001B[39m, html\u001B[38;5;241m.\u001B[39mescape(frame_id, quote\u001B[38;5;241m=\u001B[39m\u001B[38;5;28;01mTrue\u001B[39;00m)),\n\u001B[1;32m    387\u001B[0m         (\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124m%\u001B[39m\u001B[38;5;124mJSON_ID\u001B[39m\u001B[38;5;124m%\u001B[39m\u001B[38;5;124m\"\u001B[39m, json\u001B[38;5;241m.\u001B[39mdumps(frame_id)),\n\u001B[0;32m   (...)\u001B[0m\n\u001B[1;32m    390\u001B[0m         (\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124m%\u001B[39m\u001B[38;5;124mURL\u001B[39m\u001B[38;5;124m%\u001B[39m\u001B[38;5;124m\"\u001B[39m, json\u001B[38;5;241m.\u001B[39mdumps(proxy_url)),\n\u001B[1;32m    391\u001B[0m     ]\n\u001B[1;32m    392\u001B[0m \u001B[38;5;28;01melse\u001B[39;00m:\n\u001B[1;32m    393\u001B[0m     replacements \u001B[38;5;241m=\u001B[39m [\n\u001B[1;32m    394\u001B[0m         (\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124m%\u001B[39m\u001B[38;5;124mHTML_ID\u001B[39m\u001B[38;5;124m%\u001B[39m\u001B[38;5;124m\"\u001B[39m, html\u001B[38;5;241m.\u001B[39mescape(frame_id, quote\u001B[38;5;241m=\u001B[39m\u001B[38;5;28;01mTrue\u001B[39;00m)),\n\u001B[1;32m    395\u001B[0m         (\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124m%\u001B[39m\u001B[38;5;124mJSON_ID\u001B[39m\u001B[38;5;124m%\u001B[39m\u001B[38;5;124m\"\u001B[39m, json\u001B[38;5;241m.\u001B[39mdumps(frame_id)),\n\u001B[1;32m    396\u001B[0m         (\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124m%\u001B[39m\u001B[38;5;124mHEIGHT\u001B[39m\u001B[38;5;124m%\u001B[39m\u001B[38;5;124m\"\u001B[39m, 
\u001B[38;5;124m\"\u001B[39m\u001B[38;5;132;01m%d\u001B[39;00m\u001B[38;5;124m\"\u001B[39m \u001B[38;5;241m%\u001B[39m height),\n\u001B[0;32m--> 397\u001B[0m         (\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124m%\u001B[39m\u001B[38;5;124mPORT\u001B[39m\u001B[38;5;124m%\u001B[39m\u001B[38;5;124m\"\u001B[39m, \u001B[38;5;124;43m\"\u001B[39;49m\u001B[38;5;132;43;01m%d\u001B[39;49;00m\u001B[38;5;124;43m\"\u001B[39;49m\u001B[43m \u001B[49m\u001B[38;5;241;43m%\u001B[39;49m\u001B[43m \u001B[49m\u001B[43mport\u001B[49m),\n\u001B[1;32m    398\u001B[0m         (\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124m%\u001B[39m\u001B[38;5;124mURL\u001B[39m\u001B[38;5;124m%\u001B[39m\u001B[38;5;124m\"\u001B[39m, json\u001B[38;5;241m.\u001B[39mdumps(\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124m/\u001B[39m\u001B[38;5;124m\"\u001B[39m)),\n\u001B[1;32m    399\u001B[0m     ]\n\u001B[1;32m    401\u001B[0m \u001B[38;5;28;01mfor\u001B[39;00m (k, v) \u001B[38;5;129;01min\u001B[39;00m replacements:\n\u001B[1;32m    402\u001B[0m     shell \u001B[38;5;241m=\u001B[39m shell\u001B[38;5;241m.\u001B[39mreplace(k, v)\n",
      "\u001B[0;31mTypeError\u001B[0m: %d format: a number is required, not DataFrame"
     ]
    }
   ],
   "source": [
     "if not use_preset and ITEM_FEAT_COL is not None:\n",
     "    # Encode 'genres' into int array (multi-hot representation) to use as item features,\n",
     "    # e.g. \"Animation|Children's|Comedy\" -> [0, 0, 1, 1, 1, 0, ...]. Each row stores its\n",
     "    # encoded vector as a Python list so it fits in a single DataFrame column.\n",
     "    genres_encoder = sklearn.preprocessing.MultiLabelBinarizer()\n",
     "    data[ITEM_FEAT_COL] = genres_encoder.fit_transform(\n",
     "        data[ITEM_FEAT_COL].apply(lambda s: s.split(\"|\"))\n",
     "    ).tolist()\n",
     "    print(\"Genres:\", genres_encoder.classes_)\n",
     "    display(data.head())"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### 1.3 Train and Test Split"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "75000 train samples and 25000 test samples\n"
     ]
    }
   ],
   "source": [
     "# Split into train/test (75/25) when using freshly downloaded data; otherwise load\n",
     "# the pre-split pickles (optionally rooted at DATA_DIR) and rebuild the full dataset\n",
     "# so downstream cells can derive the unique user/item sets from `data`.\n",
     "if not use_preset:\n",
     "    train, test = python_random_split(data, ratio=0.75, seed=RANDOM_SEED)\n",
     "else:\n",
     "    train = pd.read_pickle(path=TRAIN_PICKLE_PATH if DATA_DIR is None else os.path.join(DATA_DIR, TRAIN_PICKLE_PATH))\n",
     "    test = pd.read_pickle(path=TEST_PICKLE_PATH if DATA_DIR is None else os.path.join(DATA_DIR, TEST_PICKLE_PATH))\n",
     "    data = pd.concat([train, test])\n",
     "\n",
     "print(\"{} train samples and {} test samples\".format(len(train), len(test)))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {
    "scrolled": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Total 1682 items and 943 users in the dataset\n"
     ]
    }
   ],
   "source": [
     "# Unique items in the dataset\n",
     "if ITEM_FEAT_COL is None:\n",
     "    items = data.drop_duplicates(ITEM_COL)[[ITEM_COL]].reset_index(drop=True)\n",
     "    item_feat_shape = None\n",
     "else:\n",
     "    items = data.drop_duplicates(ITEM_COL)[[ITEM_COL, ITEM_FEAT_COL]].reset_index(drop=True)\n",
     "    # Length of the multi-hot feature vector (number of distinct genres),\n",
     "    # taken from the first item's encoded list\n",
     "    item_feat_shape = len(items[ITEM_FEAT_COL][0])\n",
     "# Unique users in the dataset\n",
     "users = data.drop_duplicates(USER_COL)[[USER_COL]].reset_index(drop=True)\n",
     "\n",
     "print(\"Total {} items and {} users in the dataset\".format(len(items), len(users)))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 2. Build Model\n",
    "\n",
    "Wide-and-deep model consists of a linear model and DNN. We use the following hyperparameters and feature sets for the model:\n",
    "\n",
    "<br> | <div align=\"center\">Wide (linear) model</div> | <div align=\"center\">Deep neural networks</div>\n",
    "---|---|---\n",
    "Feature set | <ul><li>User-item co-occurrence features<br>to capture how their co-occurrence<br>correlates with the target rating</li></ul> | <ul><li>Deep, lower-dimensional embedding vectors<br>for every user and item</li><li>Item feature vector</li></ul>\n",
    "Hyperparameters | <ul><li>FTRL optimizer</li><li>Learning rate = 0.0029</li><li>L1 regularization = 0.0</li></ul> | <ul><li>Adagrad optimizer</li><li>Learning rate = 0.1</li><li>Hidden units = [128, 256, 32]</li><li>Dropout rate = 0.4</li><li>Use batch normalization (Batch size = 64)</li><li>User embedding vector size = 4</li><li>Item embedding vector size = 4</li></ul>\n",
    "\n",
    "<br>\n",
    "\n",
    "* [FTRL optimizer](https://www.eecs.tufts.edu/~dsculley/papers/ad-click-prediction.pdf)\n",
    "* [Adagrad optimizer](http://www.jmlr.org/papers/volume12/duchi11a/duchi11a.pdf)\n",
    "\n",
    "Note, the hyperparameters are optimized for the training set. We used **Azure Machine Learning service** ([AzureML](https://azure.microsoft.com/en-us/services/machine-learning-service/)) to find the best hyperparameters, where we further split the training set into two subsets for training and validation respectively so that the test set is being separated from the tuning and training phases. For more details, see [azureml_hyperdrive_wide_and_deep.ipynb](../04_model_select_and_optimize/azureml_hyperdrive_wide_and_deep.ipynb)."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "# Save a model checkpoint every n steps; with STEPS total batches this stores ~5 checkpoints.\n",
     "save_checkpoints_steps = max(1, STEPS // 5)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Wide feature specs:\n",
      "\t VocabularyListCategoricalColumn(key='userID', vocabulary_list=(196, 63, 226, 154, 306, 296, 34, 271, ...\n",
      "\t VocabularyListCategoricalColumn(key='itemID', vocabulary_list=(242, 302, 377, 51, 346, 474, 265, 465 ...\n",
      "\t CrossedColumn(keys=(VocabularyListCategoricalColumn(key='userID', vocabulary_list=(196, 63, 226, 154 ...\n",
      "Deep feature specs:\n",
      "\t EmbeddingColumn(categorical_column=VocabularyListCategoricalColumn(key='userID', vocabulary_list=(19 ...\n",
      "\t EmbeddingColumn(categorical_column=VocabularyListCategoricalColumn(key='itemID', vocabulary_list=(24 ...\n",
      "\t NumericColumn(key='genres', shape=(19,), default_value=None, dtype=tf.float32, normalizer_fn=None) ...\n"
     ]
    }
   ],
   "source": [
     "# Define wide (linear) and deep (dnn) features\n",
     "wide_columns, deep_columns = wide_deep.build_feature_columns(\n",
     "    users=users[USER_COL].values,\n",
     "    items=items[ITEM_COL].values,\n",
     "    user_col=USER_COL,\n",
     "    item_col=ITEM_COL,\n",
     "    item_feat_col=ITEM_FEAT_COL,\n",
     "    crossed_feat_dim=1000,  # presumably the hash-bucket size of the user-item crossed column — confirm in build_feature_columns\n",
     "    user_dim=DNN_USER_DIM,\n",
     "    item_dim=DNN_ITEM_DIM,\n",
     "    item_feat_shape=item_feat_shape,\n",
     "    model_type=MODEL_TYPE,\n",
     ")\n",
     "\n",
     "# Print a truncated repr of each feature column as a quick sanity check\n",
     "print(\"Wide feature specs:\")\n",
     "for c in wide_columns:\n",
     "    print(\"\\t\", str(c)[:100], \"...\")\n",
     "print(\"Deep feature specs:\")\n",
     "for c in deep_columns:\n",
     "    print(\"\\t\", str(c)[:100], \"...\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {},
   "outputs": [],
   "source": [
     "# Build a wide-and-deep estimator from the parameters above.\n",
     "# NOTE(review): optimizer kwargs that do not apply to the chosen optimizer\n",
     "# (e.g. momentum for adagrad) are presumably filtered by tf_utils.build_optimizer —\n",
     "# confirm they are ignored rather than raising.\n",
     "model = wide_deep.build_model(\n",
     "    model_dir=model_dir,\n",
     "    wide_columns=wide_columns,\n",
     "    deep_columns=deep_columns,\n",
     "    linear_optimizer=tf_utils.build_optimizer(LINEAR_OPTIMIZER, LINEAR_OPTIMIZER_LR, **{\n",
     "        'l1_regularization_strength': LINEAR_L1_REG,\n",
     "        'l2_regularization_strength': LINEAR_L2_REG,\n",
     "        'momentum': LINEAR_MOMENTUM,\n",
     "    }),\n",
     "    dnn_optimizer=tf_utils.build_optimizer(DNN_OPTIMIZER, DNN_OPTIMIZER_LR, **{\n",
     "        'l1_regularization_strength': DNN_L1_REG,\n",
     "        'l2_regularization_strength': DNN_L2_REG,\n",
     "        'momentum': DNN_MOMENTUM,  \n",
     "    }),\n",
     "    dnn_hidden_units=DNN_HIDDEN_UNITS,\n",
     "    dnn_dropout=DNN_DROPOUT,\n",
     "    dnn_batch_norm=(DNN_BATCH_NORM==1),\n",
     "    log_every_n_iter=max(1, STEPS//10),  # log 10 times\n",
     "    save_checkpoints_steps=save_checkpoints_steps,\n",
     "    seed=RANDOM_SEED\n",
     ")"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 3. Train and Evaluate Model\n",
    "\n",
    "Now we are all set to train the model. Here, we show how to utilize session hooks to track model performance while training. Our custom hook `tf_utils.evaluation_log_hook` estimates the model performance on the given data based on the specified evaluation functions. Note we pass test set to evaluate the model on rating metrics while we use <span id=\"ranking-pool\">ranking-pool (all the user-item pairs)</span> for ranking metrics.\n",
    "\n",
    "> Note: The TensorFlow Estimator's default loss calculates Mean Squared Error. Square root of the loss is the same as [RMSE](https://en.wikipedia.org/wiki/Root-mean-square_deviation)."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "# Map the evaluator's generic column-name kwargs to this notebook's column constants.\n",
     "cols = {\n",
     "    'col_user': USER_COL,\n",
     "    'col_item': ITEM_COL,\n",
     "    'col_rating': RATING_COL,\n",
     "    'col_prediction': PREDICT_COL,\n",
     "}\n",
     "\n",
     "# Prepare ranking evaluation set, i.e. get the cross join of all user-item pairs\n",
     "# (the Cartesian product of users and items), excluding pairs already seen in training.\n",
     "ranking_pool = user_item_pairs(\n",
     "    user_df=users,\n",
     "    item_df=items,\n",
     "    user_col=USER_COL,\n",
     "    item_col=ITEM_COL,\n",
     "    user_item_filter_df=train,  # Remove seen items\n",
     "    shuffle=True,\n",
     "    seed=RANDOM_SEED\n",
     ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "metadata": {
    "collapsed": true,
    "scrolled": false
   },
   "outputs": [],
   "source": [
    "# Define training hooks to track performance while training\n",
    "hooks = []\n",
    "if EVALUATE_WHILE_TRAINING:\n",
    "    evaluation_logger = tf_utils.MetricsLogger()\n",
    "    for metrics in (RANKING_METRICS, RATING_METRICS):\n",
    "        if len(metrics) > 0:\n",
    "            hooks.append(\n",
    "                tf_utils.evaluation_log_hook(\n",
    "                    model,\n",
    "                    logger=evaluation_logger,\n",
    "                    true_df=test,\n",
    "                    y_col=RATING_COL,\n",
    "                    eval_df=ranking_pool if metrics==RANKING_METRICS else test.drop(RATING_COL, axis=1),\n",
    "                    every_n_iter=save_checkpoints_steps,\n",
    "                    model_dir=model_dir,\n",
    "                    eval_fns=[evaluator.metrics[m] for m in metrics],\n",
    "                    **({**cols, 'k': TOP_K} if metrics==RANKING_METRICS else cols)\n",
    "                )\n",
    "            )\n",
    "\n",
    "# Define training input (sample feeding) function\n",
    "train_fn = tf_utils.pandas_input_fn(\n",
    "    df=train,\n",
    "    y_col=RATING_COL,\n",
    "    batch_size=BATCH_SIZE,\n",
    "    num_epochs=None,  # We use steps=TRAIN_STEPS instead.\n",
    "    shuffle=True,\n",
    "    seed=RANDOM_SEED,\n",
    ")"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Let's train the model."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "metadata": {
    "scrolled": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Training steps = 50000, Batch size = 32 (num epochs = 21)\n",
      "WARNING:tensorflow:From /usr/local/anaconda/anaconda3/envs/DeepLearning/lib/python3.8/site-packages/tensorflow/python/training/training_util.py:396: Variable.initialized_value (from tensorflow.python.ops.variables) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Use Variable.read_value. Variables in 2.X are initialized automatically both in eager and graph (inside tf.defun) contexts.\n",
      "INFO:tensorflow:Calling model_fn.\n",
      "WARNING:tensorflow:From /usr/local/anaconda/anaconda3/envs/DeepLearning/lib/python3.8/site-packages/keras/layers/normalization/batch_normalization.py:520: _colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Colocations handled automatically by placer.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/usr/local/anaconda/anaconda3/envs/DeepLearning/lib/python3.8/site-packages/keras/engine/base_layer_v1.py:1684: UserWarning: `layer.add_variable` is deprecated and will be removed in a future version. Please use `layer.add_weight` method instead.\n",
      "  warnings.warn('`layer.add_variable` is deprecated and '\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:From /usr/local/anaconda/anaconda3/envs/DeepLearning/lib/python3.8/site-packages/tensorflow/python/training/adagrad.py:138: calling Constant.__init__ (from tensorflow.python.ops.init_ops) with dtype is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Call initializer instance with the dtype argument instead of passing it to the constructor\n",
      "INFO:tensorflow:Done calling model_fn.\n",
      "INFO:tensorflow:Create CheckpointSaverHook.\n",
      "INFO:tensorflow:Graph was finalized.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "2022-04-29 23:37:18.660718: I tensorflow/core/platform/cpu_feature_guard.cc:151] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations:  AVX2 FMA\n",
      "To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "INFO:tensorflow:Running local_init_op.\n",
      "INFO:tensorflow:Done running local_init_op.\n",
      "INFO:tensorflow:Calling checkpoint listeners before saving checkpoint 0...\n",
      "INFO:tensorflow:Saving checkpoints for 0 into /var/folders/0g/hn4z3dz916b61gmv11shxdf80000gn/T/tmpj3p39lfp/model.ckpt.\n",
      "INFO:tensorflow:Calling checkpoint listeners after saving checkpoint 0...\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/usr/local/anaconda/anaconda3/envs/DeepLearning/lib/python3.8/site-packages/keras/engine/base_layer_v1.py:1684: UserWarning: `layer.add_variable` is deprecated and will be removed in a future version. Please use `layer.add_weight` method instead.\n",
      "  warnings.warn('`layer.add_variable` is deprecated and '\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "INFO:tensorflow:loss = 432.8184, step = 0\n",
      "INFO:tensorflow:global_step/sec: 141.122\n",
      "INFO:tensorflow:loss = 37.69476, step = 5000 (10.535 sec)\n",
      "INFO:tensorflow:Calling checkpoint listeners before saving checkpoint 10000...\n",
      "INFO:tensorflow:Saving checkpoints for 10000 into /var/folders/0g/hn4z3dz916b61gmv11shxdf80000gn/T/tmpj3p39lfp/model.ckpt.\n",
      "INFO:tensorflow:Calling checkpoint listeners after saving checkpoint 10000...\n",
      "INFO:tensorflow:global_step/sec: 470.201\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/usr/local/anaconda/anaconda3/envs/DeepLearning/lib/python3.8/site-packages/keras/engine/base_layer_v1.py:1684: UserWarning: `layer.add_variable` is deprecated and will be removed in a future version. Please use `layer.add_weight` method instead.\n",
      "  warnings.warn('`layer.add_variable` is deprecated and '\n",
      "/usr/local/anaconda/anaconda3/envs/DeepLearning/lib/python3.8/site-packages/keras/engine/base_layer_v1.py:1684: UserWarning: `layer.add_variable` is deprecated and will be removed in a future version. Please use `layer.add_weight` method instead.\n",
      "  warnings.warn('`layer.add_variable` is deprecated and '\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "INFO:tensorflow:loss = 18.764023, step = 10000 (34.810 sec)\n",
      "INFO:tensorflow:global_step/sec: 144.89\n",
      "INFO:tensorflow:loss = 19.69486, step = 15000 (10.333 sec)\n",
      "INFO:tensorflow:Calling checkpoint listeners before saving checkpoint 20000...\n",
      "INFO:tensorflow:Saving checkpoints for 20000 into /var/folders/0g/hn4z3dz916b61gmv11shxdf80000gn/T/tmpj3p39lfp/model.ckpt.\n",
      "INFO:tensorflow:Calling checkpoint listeners after saving checkpoint 20000...\n",
      "INFO:tensorflow:global_step/sec: 471.133\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/usr/local/anaconda/anaconda3/envs/DeepLearning/lib/python3.8/site-packages/keras/engine/base_layer_v1.py:1684: UserWarning: `layer.add_variable` is deprecated and will be removed in a future version. Please use `layer.add_weight` method instead.\n",
      "  warnings.warn('`layer.add_variable` is deprecated and '\n",
      "/usr/local/anaconda/anaconda3/envs/DeepLearning/lib/python3.8/site-packages/keras/engine/base_layer_v1.py:1684: UserWarning: `layer.add_variable` is deprecated and will be removed in a future version. Please use `layer.add_weight` method instead.\n",
      "  warnings.warn('`layer.add_variable` is deprecated and '\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "INFO:tensorflow:loss = 37.011353, step = 20000 (34.650 sec)\n",
      "INFO:tensorflow:global_step/sec: 143.642\n",
      "INFO:tensorflow:loss = 27.60313, step = 25000 (10.771 sec)\n",
      "INFO:tensorflow:Calling checkpoint listeners before saving checkpoint 30000...\n",
      "INFO:tensorflow:Saving checkpoints for 30000 into /var/folders/0g/hn4z3dz916b61gmv11shxdf80000gn/T/tmpj3p39lfp/model.ckpt.\n",
      "INFO:tensorflow:Calling checkpoint listeners after saving checkpoint 30000...\n",
      "INFO:tensorflow:global_step/sec: 483.244\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/usr/local/anaconda/anaconda3/envs/DeepLearning/lib/python3.8/site-packages/keras/engine/base_layer_v1.py:1684: UserWarning: `layer.add_variable` is deprecated and will be removed in a future version. Please use `layer.add_weight` method instead.\n",
      "  warnings.warn('`layer.add_variable` is deprecated and '\n",
      "/usr/local/anaconda/anaconda3/envs/DeepLearning/lib/python3.8/site-packages/keras/engine/base_layer_v1.py:1684: UserWarning: `layer.add_variable` is deprecated and will be removed in a future version. Please use `layer.add_weight` method instead.\n",
      "  warnings.warn('`layer.add_variable` is deprecated and '\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "INFO:tensorflow:loss = 28.627806, step = 30000 (35.042 sec)\n",
      "INFO:tensorflow:global_step/sec: 142.456\n",
      "INFO:tensorflow:loss = 28.617062, step = 35000 (10.403 sec)\n",
      "INFO:tensorflow:Calling checkpoint listeners before saving checkpoint 40000...\n",
      "INFO:tensorflow:Saving checkpoints for 40000 into /var/folders/0g/hn4z3dz916b61gmv11shxdf80000gn/T/tmpj3p39lfp/model.ckpt.\n",
      "INFO:tensorflow:Calling checkpoint listeners after saving checkpoint 40000...\n",
      "INFO:tensorflow:global_step/sec: 493.597\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/usr/local/anaconda/anaconda3/envs/DeepLearning/lib/python3.8/site-packages/keras/engine/base_layer_v1.py:1684: UserWarning: `layer.add_variable` is deprecated and will be removed in a future version. Please use `layer.add_weight` method instead.\n",
      "  warnings.warn('`layer.add_variable` is deprecated and '\n",
      "/usr/local/anaconda/anaconda3/envs/DeepLearning/lib/python3.8/site-packages/keras/engine/base_layer_v1.py:1684: UserWarning: `layer.add_variable` is deprecated and will be removed in a future version. Please use `layer.add_weight` method instead.\n",
      "  warnings.warn('`layer.add_variable` is deprecated and '\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "INFO:tensorflow:loss = 25.62237, step = 40000 (34.694 sec)\n",
      "INFO:tensorflow:global_step/sec: 143.176\n",
      "INFO:tensorflow:loss = 21.767944, step = 45000 (10.358 sec)\n",
      "INFO:tensorflow:Calling checkpoint listeners before saving checkpoint 50000...\n",
      "INFO:tensorflow:Saving checkpoints for 50000 into /var/folders/0g/hn4z3dz916b61gmv11shxdf80000gn/T/tmpj3p39lfp/model.ckpt.\n",
      "WARNING:tensorflow:From /usr/local/anaconda/anaconda3/envs/DeepLearning/lib/python3.8/site-packages/tensorflow/python/training/saver.py:1052: remove_checkpoint (from tensorflow.python.training.checkpoint_management) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Use standard file APIs to delete files with this prefix.\n",
      "INFO:tensorflow:Calling checkpoint listeners after saving checkpoint 50000...\n",
      "INFO:tensorflow:Loss for final step: 23.86943.\n"
     ]
    }
   ],
   "source": [
    "print(\n",
    "    \"Training steps = {}, Batch size = {} (num epochs = {})\"\n",
    "    .format(STEPS, BATCH_SIZE, (STEPS*BATCH_SIZE)//len(train))\n",
    ")\n",
    "tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.INFO)\n",
    "\n",
    "try:\n",
    "    model.train(\n",
    "        input_fn=train_fn,\n",
    "        hooks=hooks,\n",
    "        steps=STEPS\n",
    "    )\n",
    "except tf.train.NanLossDuringTrainingError:\n",
    "    import warnings\n",
    "    warnings.warn(\n",
    "        \"Training stopped with NanLossDuringTrainingError. \"\n",
    "        \"Try other optimizers, smaller batch size and/or smaller learning rate.\"\n",
    "    )"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "application/scrapbook.scrap.json+json": {
       "name": "eval_ndcg_at_k",
       "data": [
        0.014169790473514545,
        0.13618394840682413,
        0.1143346562632753,
        0.10054056869279787,
        0.0913062995488209
       ],
       "encoder": "json",
       "version": 1
      }
     },
     "metadata": {
      "scrapbook": {
       "name": "eval_ndcg_at_k",
       "data": true,
       "display": false
      }
     },
     "output_type": "display_data"
    },
    {
     "data": {
      "application/scrapbook.scrap.json+json": {
       "name": "eval_precision_at_k",
       "data": [
        0.013467656415694591,
        0.12682926829268293,
        0.10848356309650055,
        0.09777306468716863,
        0.0909862142099682
       ],
       "encoder": "json",
       "version": 1
      }
     },
     "metadata": {
      "scrapbook": {
       "name": "eval_precision_at_k",
       "data": true,
       "display": false
      }
     },
     "output_type": "display_data"
    },
    {
     "data": {
      "application/scrapbook.scrap.json+json": {
       "name": "eval_rmse",
       "data": [
        3.770617201000398,
        0.9702126038421389,
        0.9577070909193176,
        0.9542918222183457,
        0.9528037742594986
       ],
       "encoder": "json",
       "version": 1
      }
     },
     "metadata": {
      "scrapbook": {
       "name": "eval_rmse",
       "data": true,
       "display": false
      }
     },
     "output_type": "display_data"
    },
    {
     "data": {
      "application/scrapbook.scrap.json+json": {
       "name": "eval_mae",
       "data": [
        3.596596319773281,
        0.7796735506153106,
        0.7643287268853187,
        0.7599827148866654,
        0.757829128909111
       ],
       "encoder": "json",
       "version": 1
      }
     },
     "metadata": {
      "scrapbook": {
       "name": "eval_mae",
       "data": true,
       "display": false
      }
     },
     "output_type": "display_data"
    },
    {
     "data": {
      "text/plain": "<Figure size 720x720 with 4 Axes>",
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAm4AAAJNCAYAAACIkPmLAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAABt0klEQVR4nO3deXxcdb3/8ddnsrVN2ixtCumSBNqylELbJOyILKIVEFxA4HoVXG5FRfS6wvVevHo3l3sVEVTqLqiAuCGg/BBR9iUJaWmLhVKadIOmbZLuTZN8fn/MSTsNaZNpZ+bMmbyfj8c8cuZ7vmfmc2D48jnnfBdzd0REREQk+8XCDkBEREREhkeJm4iIiEhEKHETERERiQglbiIiIiIRocRNREREJCKUuImIiIhERH66v8DM5gHfAvKAH7j7VwbsPxO4ETgBuNzd7x6wfxywFPidu19zoO+aMGGC19bWpi54ERm2pqamDe5eGXYckjy1nSLhOJh2M62Jm5nlAbcA5wGrgWfN7B53X5pQrQ24CvjMfj7mP4BHhvN9tbW1NDY2HnzAInLQzKw17Bjk4KjtFAnHwbSb6X5UehKw3N1XuHs3cAdwcWIFd1/p7ouAvoEHm1k9cBjw/9Icp4iIiEjWS3fiNhlYlfB+dVA2JDOLAf/H/u/EiYiIiIwo2Tw44aPA/e6++kCVzGy+mTWaWWN7e3uGQhMRERHJvHQPTlgDTE14PyUoG45TgTeY2UeBEqDQzLa6+3WJldx9AbAAoKGhQQuvioiISM5Kd+L2LDDDzI4gnrBdDvzDcA509/f0b5vZVUDDwKRNREREZCRJ66NSd+8BrgEeAF4A7nL3JWb2ZTO7CMDMTjSz1cClwK1mtiSdMYmIiIhEVdrncXP3+4H7B5TdkLD9LPFHqAf6jJ8AP0lDeCIiIiKRkc2DE0REREQkgRK3iOvr03gMEZFk9PU57mo7JZqUuEXYoy+1c+pXHqKpdVPYoYiIRMb3HnmZt37rUV58bUvYoYgkTYlbhD3x8kaOPnwc83/WxG1PteoKUkRkGJ58eSPHTSrl8gVP8bMnV6rtlEhR4hZhTa0dfOiMI7j7I6dx25Mr+fyvF7Fzd2/YYYmIZK3ePqelrZMvXHAsd199Kr9qXM2HftrIxq27wg5NZFiUuEXU7t4+lqzpYk51GUdMKOa3Hz2dbd29XHbrk6zt3BF2eCIiWeml9VuoHFtERXEhR1aW8OuPnMb0w0o4/6ZHeeRFrb4j2U+JW0S9sG4zU8rHMG5UAQDFRfncfMVcLjihiotveZwnX94YcoQiItmnqbWDupryPe8L82Nc/9Zj+ca75/C5uxfxn/cuZVePnlxI9lLiFlEDGx8AM2P+mdP45rvn8PFfPscPHl2hvhsiIgmaWjuoH9B2Apw+fQJ//MQbaNu0nXfc8gTL128NITqRoSlxi6j9NT4AZ8yYwG8/ehq/fW4Nn7ijhe3dPRmOTkQkOzW3dlBXPXjbWV5cyK3vrec9p1Tz7luf5OdPa9CXZB8lbhHVfIDEDWBqxRh+/ZHTyM8z3vmdJ2jduC2D0YmIZJ8NW3excVs3MyaW7LeOmfGek2u468On8POn2vjwbU10bOvOYJQiB6bELYLWde1gZ08ftePHHLDeqII8/u/S2VxxUjXv+u4T/HXZ+gxFKCKSfZ5r62RudTmxmA1Zd/rEsfz2Y6dRXTGG8296lCeWb8hAhCJDU+IWQc2tndRVl2M2dONjZlx5Wi3feU89n//1Im55eLlu/YvIiNTU2kH9fh6TDqYoP49/vXAmX33XCfzzXS185Y9/p7unL40RigxNiVsExQcmlCV1zElHVPD7j53Bn194jatvb2LLzt3pCU5EUsLM5pnZMjNbbmbXDbL/TDNrNrMeM7skoXyOmT1pZkvMbJGZXZbZyLPXUF1M9ufMoyq5/9o38NJrW3jXd59gRbsGLkh4lLhFUFNbcleN/Q4vHcUd809h
fEkRb7/lcY2aEslSZpYH3AK8FZgJXGFmMwdUawOuAn4xoHw78D53Pw6YB9xoZmVpDTgCunv6WLy2i9lTSw/q+PElRfzgygYubZjCJd97krueXaWnFxIKJW4Rs3N3Ly++uoUTppQd1PFF+Xn89zuOZ/6ZR3LZrU/ywJJXUxugiKTCScByd1/h7t3AHcDFiRXcfaW7LwL6BpS/6O4vBdtrgfVAZWbCzl5L122mumIMY4O5Lw+GmfG+U2v55T+dwg8fe4WP/aKZru16eiGZpcQtYhat7uKow0oYXZh3SJ9z2YnV/PCqE/nSPUv43weW0dunK0eRLDIZWJXwfnVQlhQzOwkoBF5OUVyRdaAplJJ19OFj+f01pzNx7CjOv+lRnl6hCc8lc5S4RUxz2+sn3j1Yc6aWcc/Hz+DZlZv4wE+e1ZWjSA4xsyrgNuD97v66HvVmNt/MGs2ssb0995d6am5LXeIG8VH7/37Rcfzn22dxzS+f438fWMbuXg1ckPRT4hYxTQeYPPJgTCgp4vYPncy0yhLedvNjvLBuc8o+W0QO2hpgasL7KUHZsJjZOOA+4Avu/tRgddx9gbs3uHtDZWXuP0k92IEJQzn7mIncd+0ZPL+mi0u/96TmzJS0U+IWIe6elsanIC/GDW+byafffBTv+cHT3LNwbUo/X0SS9iwww8yOMLNC4HLgnuEcGNT/LfAzd787jTFGxtrOHXT39FFdceC5Lw/WxLGj+PFVJ3LR7Em84ztP8Oum1Rq4IGmjxC1CWjdupzA/xqSy0Wn5/IvnTOb2D57M1x/4O/9131J6dNtfJBTu3gNcAzwAvADc5e5LzOzLZnYRgJmdaGargUuBW81sSXD4u4EzgavMrCV4zcn8WWSP/rWdhzP35cGKxYwPnHEEt3/wZL73t5f5xB0tbNa0S5IGStwiZLCF5VNt5qRx/OGaM/j7q1t47w+fYePWXWn9PhEZnLvf7+5Hufs0d/+voOwGd78n2H7W3ae4e7G7jw+m/8Ddb3f3Anefk/BqCfFUQpfKgQlDmTlpHPdccwbjRudz/rcepXHlpox8r4wcStwipPkg529LVtmYQn7y/pOYW13GRTc/zqLVnWn/ThGRdHkuxQMThjK6MI//fPvxfPFtx3H17c1888EX9QRDUkaJW4Rk4o5bv7yY8bl5x/BvFx7LVT9+lrsaVw19kIhIltnR3cuLr23l+MkHN/HuoThv5mHcd+0ZNLV2cNmCp1i1aXvGY5Dco8QtIrbs3E3bpu3MrBqX0e+dN6uKO+efwvf++jL/+rvntU6fiETKotWdHH34WEYVHNrclwfrsHGj+NkHTmLecYdz8S2P8/uWYQ8OFhmUEreIaFnVyaxJpRTmZ/5f2YzDxvK7a07n1a5dXPH9p3ht886MxyAicjCa2lI7hdLBiMWMfzrzSH72gZP41kMv8ak7W7RetBw0JW4RkcnHpIMZN6qABe+t541HVXLRzY+pw62IREK65m87GLMml3Lvx8+gqCDGBTc9xnNtHWGHJBGkxC0iMjkqan9iMePac2fwlXeewIdva+K2J1dqriIRyVruTnNbJ3U1ZWGHsseYwnz+550n8C/nH8M//ayRm//ykpYclKQocYuAvj6nZVUnddVlYYcCxGcK//VHTuP2p9r47N2L2Lm7N+yQREReZ+XG7YwuyKOqND1zXx6KebOq+MPHz+Dx5Ru54vtPsaZzR9ghSUSkPXEzs3lmtszMlpvZdYPsP9PMms2sx8wuSSifY2ZPmtkSM1tkZpelO9Zs9dL6rYwvLmR8SVHYoexRO6GY33z0NHbs7uXS7z2pRkdEsk7YXUyGUlU6mts/dDJnHz2Ri779GPcu0qo1MrS0Jm5mlgfcArwVmAlcYWYzB1RrA64CfjGgfDvwvmBSyXnAjWZWls54s1W2Nj7FRfncfMVc3ja7irff8jhPLN8QdkgiInvE13YuCzuMA8qLGR85axo/uupE/veBZXz2VwvZtqsn7LAki6X7
jttJwHJ3X+Hu3cAdwMWJFdx9pbsvAvoGlL/o7i8F22uB9UDur4Q8iGzo37Y/Zsb8M6dx42VzuPaOFr7/yAr1exORrJBNAxOGMntqGfdd+wYALvz2Y5r4XPYr3YnbZCBx5tbVQVlSzOwkoBB4OUVxRUpzhmf9PhinT5/A7z52Gr9fuIZr72hhe7euGEUkPJt37mZVx3aOzfDcl4eiuCifr186m0+/+Sje/+Nn+e5fX6ZPAxdkgKwfnGBmVcBtwPvd/XWzv5rZfDNrNLPG9vb2zAeYZpu2dbNh6y5mTBwbdihDmlI+hruvPo3CvBjv/M4TtG7cFnZIIjJCtbR1cvzkUgrysv5/c69z4QmTuOfjZ/Dw39fznh88zbou9SGWvdL9i14DTE14PyUoGxYzGwfcB3zB3Z8arI67L3D3BndvqKzMvSepza0dzJlaRl7Mwg5lWEYV5PG/l57AP5xczbu++wQPL1sfdkgiMgJlcxeT4ZhcNppfzj+F06aN523ffow/LV4XdkiSJdKduD0LzDCzI8ysELgcuGc4Bwb1fwv8zN3vTmOMWS0bZv1OlpnxvlNr+e4/1nPdrxfx7Yde0u1+Ecmo5gi2nQPlxYyPnzuDBe9r4L/uf4Hrf/O8uqFIehM3d+8BrgEeAF4A7nL3JWb2ZTO7CMDMTjSz1cClwK1mtiQ4/N3AmcBVZtYSvOakM95sFOWrxhNrK7jnmjN4eNl6Pnx7k5Z4EZGM6O1zWto6s3I0/sGoqy7n/mvfwK7dvVz47cdYvKYr7JAkRGl/+O/u97v7Ue4+zd3/Kyi7wd3vCbafdfcp7l7s7uOD6T9w99vdvcDd5yS8WtIdbzbZ3dvH4jVdzMny4ewHcti4Udwx/1Qmji3i4lseZ/n6LWGHJCI57sXXtlA5toiK4sKwQ0mZsaMK+MZlc/jEuTO48kfP8P1HVuhJxggVvV6bI8gL6zZTXTGGcaMKwg7lkBTmx/ivdxzP1WdO4923PsWfFr8adkgiksOa27Jz7stUuHjOZH73sdP505JXufLHz7B+886wQ5IMU+KWxbJ14t2D9e4Tp/Ljq07ky39Ywtcf+LvW5xORtIhyF5PhmFoxhjvnn0JddTnn3/QY339kBZ3bu8MOSzJEiVsWi8/6nVuNz+ypZdzz8TNoau3g/T95Vo2NiKRclCbePVj5eTH++byj+Mn7T2Tpus2c+bWH+dzdC9X/bQRQ4pbFcrXxmVBSxO0fPJkZE0u46ObHWbp2c9ghiUiO2LB1Fxu3dTO9siTsUDJi1uRSvnnZHP7ymbOoGV/M/J818o7vPM5vn1vNrp7esMOTNFDilqXWdu5gZ08ftePHhB1KWuTnxfi3C2fy6TcfxT/+8Gl+3zLs6f1ERParubWDudXlxCIy92WqTCgp4mNnT+eRz53N1W+cxq+b1nD6V/7C1/70d9Z0agLfXJIfdgAyuP45iMxyu/G5eM5kZkwcy9W3N/Hw39dz7bkzOHKEXCmLSOo1tXVQn2NdTJKRnxfjLccdzluOO5yX27dy25OtnP+tRzn5iAred2otp08fn/P/X8l1uuOWpZpbO3PyMelgZk4ax33XnsG0yhIu+d6TfOrOFla0bw07LBGJoOdGUNs5lGmVJfz7RcfxxHXncOZRlfzHvUs59xt/48ePv8JmzasZWUrcslR8xYSysMPImLGjCvj4uTP462fP4ogJxfEE7q4WXtmg9U5FZHi6e/pYvLaL2VNLww4lqxQX5fOPp9Twp0++gf95x/E0tnZwxlf+wr/89nn+/qr6GEeNHpVmoZ27e3nx1S2cMKUs7FAyblyQwF15ei0/eXwl7/ruE5x1dCUfP2cGR0woDjs8EcliS4O5L8dGfO7LdDEzTj5yPCcfOZ7XNu/kl8+0ceWPnqGmopj3nVbDW447nII83c/Jdvo3lIUWre7iqMNKGF2YF3YooRk3qoBrgztwteOLedd3n9AdOBE5oFyfvy2VDhs3ik++6Sge+/w5XHlaLbc9
2crpX/kL33zwRV7TpL5ZTYlbFsq1iXcPxWAJ3KfvWshKJXAiMkCuTqGUTgV5MS44oYo7P3wqt33wZDZs3cV53/gbH/t5M0+t2Ii7JkrPNkrcslBzmxqfgfoTuIc/cxbVFWN4pxI4ERlAbeehOfrwsfzXO47nsevO4cTacr7w2+eZd+Oj3PZUK9t29YQdngSUuGUZd6c5B1dMSJXS0QV84k17E7h3fOdxJXAiwtrOHezu7aO6IjfnvsykcaMKuOr0I/jzp97IDW+byWMvtXPaV/7CF3+/mOXrNeI/bBqckGVaN26nMD/GpLLRYYeS1foTuKuCQQzv/O4TnHPMRK45ezq1GsQgMuI0BRPvao6y1DEzTp8+gdOnT2Bt5w5+8XQbly94iqMPL+G9p9TypmMnkq/BDBmnf+JZRv3bkpN4B25qefwR6md+pTtwIiONBiak16Sy0XzmLUfz+HVn8+6GqXz/0RWc+bWHufkvL9G+ZVfY4Y0oStyyzEif9ftgJSZwU8pH847vPM5nfrWQ1o1K4CSazGyemS0zs+Vmdt0g+880s2Yz6zGzSwbsu9LMXgpeV2Yu6vCof1tmFOXncfGcyfz6I6ex4H0NrNq0g3P/76984o7naGrdpMEMGaDELctoVNShKR1dwCffdBR//ezZTCkfzdtveZzPKoGTiDGzPOAW4K3ATOAKM5s5oFobcBXwiwHHVgBfBE4GTgK+aGY53ajs6O7lpde2cvxkTbybSbMml/LVS07g0c+dw/GTS/nUXQu54KbHuOOZNnZ0a4H7dFHilkW27NxN26btzJw0LuxQIi8xgZusBE6i5yRgubuvcPdu4A7g4sQK7r7S3RcBfQOOfQvwoLtvcvcO4EFgXiaCDsui1Z0cffhYRhWM3Lkvw1Q6poAPveFIHv70WXxu3tE8uPQ1TvvKQ/zHvUvVbSUNlLhlkZZVncyaVKqZq1MoMYGbVBZP4D5390LaNm4POzSRA5kMrEp4vzooS/exkdSkx6RZIRYzzjp6Ij+86kTuueYM8mPGO7/7BFf+6BkeeuE1evv0GDUVlCFkEQ1MSJ/S0QX883lH8dfPnE1V6WguvuUxJXAyopnZfDNrNLPG9vb2sMM5JJpCKftMrRjD9ecfyxPXncOFJ1TxrYde4o1ff5jv/e1lOrZ1hx1epClxyyIaFZV+pWOUwEkkrAGmJryfEpSl7Fh3X+DuDe7eUFlZedCBhs3dg4vesrBDkUGMKsjj0oap3HPNGdz8D3W8+NoWzvz6w3z6roUsXNUZdniRpMQtS/T1OS2rOqmrLgs7lBEhMYE7PEjgPn/3IiVwki2eBWaY2RFmVghcDtwzzGMfAN5sZuXBoIQ3B2U56ZUN2xhTmE9Vqea+zHZzppbxjXfP4W+fPZvpE0v46M+bufjmx/h102p27tZghuFS4pYlXlq/lQklRYwvKQo7lBGldEwBnwoSuMNKR+1J4FZtUgIn4XH3HuAa4gnXC8Bd7r7EzL5sZhcBmNmJZrYauBS41cyWBMduAv6DePL3LPDloCwnNbd1qotJxFQUF/KRs6bxyOfO5uPnzOD3C9dy+lf+wv/c/wJPr9hId8/A8TaSSCsnZIn4rN9lYYcxYvUncB88/Qh++PgrXHTzY7x55uFcc850pmoJHQmBu98P3D+g7IaE7WeJPwYd7NgfAT9Ka4BZoqm1g3q1nZGUFzPeNPMw3jTzMFa0b+XXzav57/tf4OX2bdTXlHNGsGrDMYePJRbTihj9lLhlCfVvyw79CdwHTq/lR4/FE7i3HHc4HztbCZxINmpu7eCKk6YOXVGy2pGVJXz2Lcfw2bdA5/ZunlqxkceWb+AXv2hj847dnDZ9AmdMH8/p0ycwpXxkt8VK3LJEc1sH/3TmEWGHIYGyMYV86s1H84EzjlACJ5KlunbsZlXHdo6t0tyXuaRsTCHzZlUxb1YVAKs7tvPE8o08/vIGvv7AMkqK8vesoXrqkeMpLy4MOeLMUuKWBTZu3cWG
LbuYMXFs2KHIAErgRLJXy6pOjp+suS9z3ZTyMbz7xDG8+8SpuDvLXtvCYy9t4FeNq/jc3Ys4YkIxp0+fwBnTJ9BQW57zEzErccsCz7V1Mqe6jDw9w89aiQncD4MEbt6sw/noWUrgRMKiJQJHHjPjmMPHcczh4/jQG46ku6ePllWdPLZ8Azf++UVeWLeZOdVl8Tty0yYwa3Jpzv2/Ne2XKVooeWhNbZo8MirKxhTy6TcfzcOfOYsJJUVcdPNjXP8bjUIVCYMWlpfC/BgnHVHBp847irs/chpP/cu5fOD0I2jfsovP3r2Quv94kKtva+K2p1p5ZcM23KO/ekNa77glLJR8HvFlV541s3vcfWlCtf6Fkj8z4Nj+hZIbAAeagmM70hlzGJpaO7jm7OlhhyFJ6E/gPqg7cCKh6O1zWto6mXu5EjfZa+yoAs499jDOPfYwANZv3snjL2/gsZc2cstflpMXM04PBjmcNm0ClWOjNwVXuh+V7lkoGcDM+hdK3pO4ufvKYN9+F0oO9vcvlPzLNMecUbt7+1i8pos5Gs4eSf0J3AdO3zeBu+Kkao6fXIpZbt2iF8kWL762hcqxRVSMsI7pkpyJ40bxjrlTeMfcKbg7KzZs4/HlG7hv0Tr+7XeLmVQ2ek//uJOOqKC4KPt7kKU7wsEWOz75EI7NuYWSl67dTHXFGMaNKgg7FDkE5cWFfOYt8TtwP31yJZ+4o4Wevj4uOH4SFxxfxazJ45TEiaSQ1naWZJkZ0ypLmFZZwvtOraWnt4/n13Tx+PIN3PrIy1zzi2aOm1QaT+RmjOeEKWVZOfAl+1PLIZjZfGA+QHV1dcjRJE+NT24pLy7kk286ik+cO4MX1m3hvufX8vFfNtPncMEJVVxwfBXHTVISJ3Komls7OPGIirDDkAjLz4sxt7qcudXlXHPODHZ09/Lsyk08vnwDN/x+CW2btnNSbUWQyE1gxsSSrGi70524HepCyWcNOPavAyu5+wJgAUBDQ0Pkeh02t3Vw9tETww5DUszMmDlpHDMnjeMzbz6aJWs3c9/z6/joz5uJGZx/fBUXnFDFzColcSIHo7mtg6vPmhZ2GJJDRhfmceZRlZx5VCUQn6rryRUbeXz5Bn78xCvs3N23ZzWH06ePD2193HQnbnsWSiaeiF0O/MMwj30A+O9gkWSIL5R8fepDDFdzaweffvPRYYchaWRmzJpcyqzJpXzuLfEk7t5F67j69ibyYzEuOL6K84+v4tiqsUriRIZhw9ZdbNrWzfTKkrBDkRw2vqSIC0+YxIUnTAKgbeN2Hn95Aw8vW89/3beUiuLCPRMBn3LkeEpHZ6bLU1oTN3fvMbP+hZLzgB/1L5QMNLr7PWZ2IvBboBx4m5l9yd2Pc/dNZta/UDLk4ELJazt3sLOnj9rxGoU4UiQmcZ+fdzTPr+nivufXMf+2RgrzYvHHqSdUcfRhSuJE9qe5tYM51eVav1Iyqnr8GKrHV3PFSdX09TlL123m8eUbuP2pVj51ZwszDhu7545cXU0ZRfnpmQjYcmFOk34NDQ3e2NgYdhjDdu+itfzuubX84MqGsEORkLk7i1bHk7j7Fq1jVEH8TtwFJ0ziqMOyo1/FUMysyd31Y46gqLWd//PHFxhTkM8n3jQj7FBEANi5u5fmtg6eWB5fY3Xi2CIWvG/o5vBg2s3ID06IMi0sL/3MjNlTy5g9tYzr33oMC1d3cd+itbz/x88wpig/SOKqOOowLYsm0tzawSfOPSrsMET2GFWQx2nT4nPDfeYtR9Pbl76bYkrcQtTc1skXzj827DAky5gZc6aWMWdqGf9y/rE8t6qT+xet48ofPUNJUT4XnFDFhSdUMV1r28oI1N3Tx5K1m5k9tTTsUET2K53LbClxC8nO3b28+OoWTpiixkf2z8yoqy6nrrp8TxJ336J1/OMPnqF0dMGe0anTJ6qTtowMS9dtpmZ8MWM196WMUErcQrJodRdHHVbCqIL0dF6U3BOLGfU15dTX
lPOvFxxLc1sH9z2/jvf84CnKxxTGR6eeUMU0jbSTHNbU2kGdVpqREUyJW0g08a4ciljMaKitoKG2gn+7YCZNbR3ct2gdVyx4ioriQi48IT7FyJFK4iTHNLd2cO6xmvtSRi4lbiFpau3g7XMnhR2G5IBYzDixtoITayu44cKZNLZ2cN+itVy24CkqS4r2rNhQO6E47FBFDllTawefm6e5L2XkUuIWAnfnubYO/uPtx4UdiuSYWMw46YgKTjqighvedhzPrtzE/c+v45LvPclh4/YmcTXjlcRJ9Kzt3EFPXx/VFZr7UkYuJW4haN24naL8WGjLZcjIkBczTjlyPKccOZ4vvu04nnllE/c9v5Z3ffcJDi8dxQXHT+KC46uo1gTQaWVmb3X3Pw4ou9rdvxdWTFEV799WHol5DUXSRYlbCJpaO5ir/m2SQXkx49Rp4zl12ni+dNEsnn5lI/ctWsc7vvM4k8tHx0enHl/FVN3JSId/M7Nd7v4XADP7HHA2oMQtSeobLKLELRRNbR3UV6vxkXDkxWzPRJFfuug4nn5lE/cuWsfbb3mcKeWjuSAY2DClXElcilwE3GtmnwXmAccAF4cbUjQ1t3XwbxfODDsMkVApcQtBc2sHlzVMDTsMEfLzYnsWSf6Pi4/jqRXxx6kX3fw4UyvGcOHxVXzoDUfo0dQhcPcNZnYR8GegCbjEc2mtwQzZ3t3DS69t5fjJmvtSRjYlbhm2eedu2jZtZ+akcWGHIrKP/LwYZ8yYwBkzJvDli/t48uWNtKzqVNJ2kMxsC5CYoBUCRwKXmJm7uxqBJCxa3cXRh4/V3Jcy4ilxy7CFqzqZNbmUgrxY2KGI7FdBXowzj6rkzKMqww4lstx9WGuSmdlx7r4k3fFEXXOb1nYWAVD2kGH9o6JERAK3hR1AFDS3KnETASVuGdekxkdE9qVn0UNwd130igSUuGVQb5/T0tapdfZEJJEGKgzhlQ3bGFOYz+Glo8IORSR0Stwy6KX1W5gwtojxJUVhhyIiEhmav01kr2Enbmb2umzDzCpSG05ua27t1K1+ERmoe7BCM5tnZsvMbLmZXTfI/iIzuzPY/7SZ1QblBWb2UzN73sxeMLPr0xx/2jW3dVKvJxUiQHJ33H5jZgX9b8ysCngw9SHlrvhVY1nYYYhIBpnZQwcqc/dTBtmfB9wCvBWYCVxhZgNnnv0g0OHu04FvAl8Nyi8Fitz9eKAe+HB/UhdV8YEJuk8gAsklbr8D7jKzvKAReACI/JVcJmk4u8jIYWajgqcSE8ys3MwqglctMHmIw08Clrv7CnfvBu7g9astXAz8NNi+GzjX4pPuOVBsZvnAaOJ39Dan5qwyr2vHblZ1bOeYqmHNriKS84Y9j5u7f9/MCokncLXAh939iTTFlXM2bt3Fhi27mDFRjY/ICPFh4JPAJOIrJvSPHt0M3DzEsZOBVQnvVwMn76+Ou/eYWRcwnngSdzGwDhgD/LO7bzroswhZy6pOjtfclyJ7DJm4mdmnEt8C1UALcIqZneLu30hTbDmlua2TOdVl5MU08l9kJHD3bwHfMrOPu/u3M/jVJwG9xBPGcuBRM/uzu69IrGRm84H5ANXV1RkMLzmaQklkX8O54zbwFtFv9lMuB6DGR2Rkcvdvm9ks4n3VRiWU/+wAh60BEhc0nhKUDVZndfBYtBTYCPwD8Cd33w2sN7PHgQZgn8TN3RcACwAaGhqydkqS5tYO3n96bdhhiGSNIRM3d//ScD7IzL7t7h8/9JByU3NbBx8/Z3rYYYhIhpnZF4GziCdu9xMfcPAYcKDE7VlghpkdQTxBu5x4QpboHuBK4EngEuAv7u5m1gacA9xmZsXAKcCNqTqfTOrtcxau6mSuRuOL7JHKTgOnp/Czcsru3j4Wr+liztSysEMRkcy7BDgXeNXd3w/MJn53bL/cvQe4hvggsBeAu9x9iZl92cwuCqr9EBhvZsuBTwH9U4bcApSY2RLiCeCP3X1Rqk8qE158
bQuV44qoKC4MOxSRrKFF5jNg6drNVFeMYeyogqEri0iu2eHufWbWY2bjgPXs+xh0UO5+P/E7dIllNyRs7yQ+9cfA47YOVh5FWuZK5PWUuGWAZv0WGdEazawM+D7x0aVbiT/elCE0t3Zw4hGav00kUSoflWq45H40tXVQr6tGkRHJ3T/q7p3u/j3gPODK4JEpAGZ2XHjRZbcmzX0p8jrJLHn1ulvvA8q+tZ/jRvyyLc9pRKmIAO6+cpD+ZreFEkyW27B1Fx3bupleWRJ2KCJZJZk7boMlTnvK3P0nA3dq2RZY27mDnT191IwfE3YoIpKd9LRiEM2tHcytLiemuS9F9jGcCXjfCpwPTDazmxJ2jQN6hjh8z7ItwWf1L9uyNKHOxcC/B9t3Azfn0rItzW3xzrXxUxIReZ2snUMtTE1tGpggMpjh3HFbCzQCO4l3rO1/3QO8ZYhjB1u2ZeAaffss2wIkLtuyjfiyLW3A/0Zx2RZNvCsikrxmtZ0igxrOBLwLgYVm9otgJu5MyYllW5pbO/jCBQOfDouI7NEddgDZprunjyVrNzN76gGnuxMZkZKZDqTWzP6H1y/bcuQBjhnRy7bs3N3Li69t5YQpanxERjIzmwzUkNDmuvsjwd9TwoorWy1dt5ma8cWa+1JkEMkkbj8Gvkh8AMHZwPsZ+lHriF62ZdHqLo46rIRRBXlhhyIiITGzrwKXEe/b2xsUO/BIaEFluXgXk7KwwxDJSskkbqPd/SEzM3dvBf7dzJqAG/Z3gLv3mFn/si15wI/6l20BGt39HuLLttwWLNuyiXhyB/HRqD8Olm0xIrhsiybeFRHg7cDR7r4r7ECiorm1g3OPnRh2GCJZKZnEbZeZxYCXgmRsDTDkBDsjedmWptYO3j53UthhiEi4VgAFgBK3YXB3Gls38bl5R4cdikhWSiZx+wQwBrgW+A/ij0uvTEdQucDdaW7r4D/erknRRUa47UCLmT1EQvLm7teGF1L2Wtu1k94+p7pCc1+KDGbYiZu7PxtsbiXev20fZvZtd/94qgKLupUbtzMqP0ZV6eiwQxGRcN0TvGQY+heW19yXIoNL5SLzp6fwsyKvWf3bRARw95+aWSFwVFC0LMNTK0WK5m8TObBULjIvCTTrt4gAmNlZwEvEB1x9B3jRzM4MM6Zs1qyF5UUOKJV33CRBc2sHlzVMHbqiiOS6/wPe7O7LAMzsKOCXxNdglgTbu3t46bWtzJqsuS9F9ieVd9zUISGweedu2jZtZ+akcWGHIiLhK+hP2gDc/UXio0xlgEWruzj68LGa+1LkAFJ5x+1bKfysSGtp62TW5FIK8vQkWkRoNLMfALcH799DfP1nGUBrO4sMbdiJm5n9gfhs34m6iDdAt7r7T1IYV6Spj4aIJPgI8DHiUykBPEq8r5sM8FxbB++smxJ2GCJZLZlbQiuITwXy/eC1GdhCfKTU91MfWnT1D2cXEXH3Xe7+DXd/Z/D6plZReD131x03kWFI5lHpae5+YsL7P5jZs+5+YrAslQC9fU5LWyc3XlYWdigiEiIzu8vd321mz/P6pxW4+wkhhJW1XtmwjTGF+Rw2blTYoYhktWQStxIzq3b3NgAzq2bvklfdKY8sol5av4UJY4sYX1IUdigiEq5PBH8vDDWKiNDaziLDk0zi9mngMTN7mfgI0iOAj5pZMfDTdAQXRXpMKiIA7r4u2NwA7HD3vmAqkGOAP4YXWXZqbuugvros7DBEsl4yS17db2YziDc6EJ/9e2ewfWOqA4uqptYOGmoqwg5DRLLHI8AbzKwc+H/As8BlxEeXSqCptYN/OKkm7DBEst6wByeY2ceA0e6+0N0XAqPN7KPpCy2anmvrVOdaEUlk7r4deCfwHXe/FDgu5JiySteO3azp2MExVWPDDkUk6yUzqvSf3L2z/427dwD/lPKIImzj1l1s2LqLGRNLhq4sIiOFmdmpxO+w3ReUaYbZBC2rNPelyHAl819JnpntWR3BzPKA
wtSHFF3NbZ3MmVpGLKZFJERkj08C1wO/dfclZnYk8HC4IWUXTQMiMnzJDE74E3Cnmd0avP9wUCYBNT4iMpC7/w34W8L7FeydjFeIr+38/tNrww5DJBKSSdw+TzxZ+0jw/kHgBymPKMKaWzv4+LnTww5DRLKAmd3o7p/cz6ozuPtFIYSVdXr7nIWrOpmr0fgiw5LMqNI+4LvBSwbY3dvH4rVdzJlaFnYoIpIdbgv+/m+oUWS5F1/bQuW4IiqK1fNGZDiGTNz2N+t3P83+Hbd07WaqK8YwdlRB2KGISBZw96Zgs5FgHjfY0z9YM3QHmlo7qNfdNpFhG87ghAuBtxHvz/Yn4iOj3kN8Asn70xdatGjWbxHZj4eAMQnvRwN/HuogM5tnZsvMbLmZXTfI/iIzuzPY/7SZ1SbsO8HMnjSzJWb2vJll7TpSzWo7RZIyZOLm7q3u3gqc5+6fc/fng9fngTenP8RoaGrTVaOIDGqUu2/tfxNsjzlA/f67crcAbwVmAleY2cwB1T4IdLj7dOCbwFeDY/OB24Gr3f044Cxgd2pOJfWa2jSoSyQZyUwHYmZ2esKb05M8Pqc1a0SpiAxum5nV9b8xs3pgxxDHnAQsd/cV7t4N3AFcPKDOxexdbvBu4NxgyqY3A4uCidJx943u3puC80i5DVt30bGtm+mVmvtSZLiSGVX6QeBHZlYavO8E3p/yiCJobecOunv6qBl/wItoERmZPgn8yszWEl/n+XDiS14dyGRgVcL71cDJ+6vj7j1m1gWMB44C3MweACqBO9z9a4d6EunQ3NrB3OpyzX0pkoRkErfFwNeAWmAC8cTtbcBzKY8qYprb4o1PwvzEIiIAuPuzZnYMcHRQtMzd0/noMh84AzgR2A48ZGZN7v5QYiUzmw/MB6iurk5jOPunx6QiyUvmUefviSdqu4lf/W0FtqUjqKjRxLsisj9mNob4PJifcPfFQK2ZXTjEYWuAqQnvpwRlg9YJ+rWVAhuJt8+PuPuGYI3U+4G6Acfi7gvcvcHdGyorKw/izA5dc2sHdeobLJKUZO64TXH3eWmLJMKaWzv4wgUD+w2LiADwY6AJODV4vwb4FXDvAY55FphhZkcE9S8H/mFAnXuAK4EngUuAv7h7/yPSzwUJYzfwRuKDF7JKd08fi9dsZvbU0qEri8geydxxe8LMjk9bJBG1o7uXF1/byglT1PiIyKCmBX3MdgMEd8EO2K/C3XuAa4AHgBeAu4J1Tr9sZv0rLvwQGG9my4FPAdcFx3YA3yCe/LUAze5+H1lmydouaicUa+5LkSQlc8ftDOAqM3sF2EW84fGhJuA1s3nAt4A84Afu/pUB+4uAnwH1xG/zX+buK4N9JwC3AuOAPuBEd9+ZRMxpt2h1J0cdPpZRBXlhhyIi2anbzEYTTGRuZtOIt6EH5O73M2CuTHe/IWF7J3Dpfo69nfiUIFkr3sWkLOwwRCInmcTtrcl+eMJcROcR73fxrJnd4+5LE6rtmYvIzC4nPhfRZQlzEb3X3Rea2XiycC6i5rZOzd8mIgfyReKTl081s58DpwNXhRpRFniurZM3zZwYdhgikTPsR6X9E/EOfA1xWM7PRRRfMaEs7DBEJAuZWQwoB95JPFn7JdDg7n8NMazQuTuNrZs0MEHkIKR7At3B5iKavL86Qb+O181FZGbNZva5NMeaNHenWcPZRWQ/gvVJPxdceN7n7ve6+4aw4wrb2q6d9PY51RWa+1IkWck8Ks20rJ+LaOXG7YzKj1FVOjqj3ysikfJnM/sMcCcJUyi5+6bwQgpXUzANiOa+FEleuhO3ZOYiWr2/uYgAzKx/LqJ9Ejd3XwAsAGhoaPA0nMN+aWF5ERmGy4gPTPjogPIjQ4glK2iJQJGDl+5HpXvmIjKzQuJzEd0zoE7/XESQMBcR8WHwx5vZmCCheyOwlCyix6QiMgwziQ/SWkh8eo5vA8eFGVDY1HaKHLy0Jm65PheRZv0WkWH4KXAscBPxpG0m
ewdkjTjbu3t46bWtzJqsuS9FDkba+7jl6lxEm3fupm3TdmZOGhd2KCKS3Wa5e+LSKg+bWVY9PcikRau7OFpzX4octHQ/Ks1ZLW2dzJpcSkGe/hGKyAE1m9kp/W/M7GSgMcR4QqW1nUUOTTaPKs1qanxEZJjqiS8Z2Ba8rwaWmdnzDGP1mVzT3NrBu+qnhB2GSGQpcTtIzW0dXHlqbdhhiEj2mxd2ANmif+7L/36nlr0WOVhK3A5Cb5/TsqqTb12uO24icmDDWGFmxHhlwzbGFOZz2LhRYYciElnqoHUQXlq/hQklRVQUF4YdiohIZGjuS5FDp8TtIDRpGhARkaQ1t3VQX10WdhgikabE7SBoYIKISPLibWdF2GGIRJoSt4Og5VpERJLTtWM3azp2cEzV2LBDEYk0JW5J2rh1Fxu3dTNjYknYoYiIREbLqk6On6K5L0UOlf4LSlJzWydzppYRi1nYoYiIRIb6BoukhhK3JKl/m4hI8tTFRCQ1lLglSY2PiEhy+ue+nKs7biKHTIlbErp7+li8tos5U8vCDkVEJDKWvbqFieM096VIKihxS8IL6zZTXTGGsaMKwg5FRCQy4vO36W6bSCoocUuCZv0WEUmeupiIpI4StyQ06apRRCRpTW266BVJFSVuSdBVo4hIctq37KJjWzfTKzX3pUgqKHEbprWdO+ju6aNm/JiwQxERiYzmtg7mVpdr7kuRFFHiNkz9/dvM1PiIiAyXnlSIpJYSt2FqblPjIyKSLLWdIqmlxG2YmrVci4hIUrp7+liydjOzNfelSMoocRuGHd29vPjaVk6YUhp2KCIikbFkbRc144spKcoPOxSRnKHEbRgWre7kqMPHMqogL+xQRGSEMLN5ZrbMzJab2XWD7C8yszuD/U+bWe2A/dVmttXMPpOxoAeIr+1cFtbXi+QkJW7DoPnbRCSTzCwPuAV4KzATuMLMZg6o9kGgw92nA98Evjpg/zeAP6Y71gNR/zaR1FPiNgzNrZ1qfEQkk04Clrv7CnfvBu4ALh5Q52Lgp8H23cC5Fgx7N7O3A68ASzIT7uu5e/yOW3VFWCGI5CQlbkNwd5rbOqjT7X4RyZzJwKqE96uDskHruHsP0AWMN7MS4PPAlzIQ536t7dpJb58ztWJ0mGGI5BwlbkNYuXE7o/JjVJWq8RGRSPh34JvuvvVAlcxsvpk1mllje3t7yoNoCkbia+5LkdTSUJ8haGF5EQnBGmBqwvspQdlgdVabWT5QCmwETgYuMbOvAWVAn5ntdPebEw929wXAAoCGhgZP9Qlo4l2R9Ej7Hbeoj4xqUuMjIpn3LDDDzI4ws0LgcuCeAXXuAa4Mti8B/uJxb3D3WnevBW4E/ntg0pYJajtF0iOtiVsujIx6TqOiRCTDgj5r1wAPAC8Ad7n7EjP7spldFFT7IfE+bcuBTwGvuzAOy/buHpav38qsyZr7UiTV0v2odM/IKAAz6x8ZtTShzsXE+2RAfGTUzWZm7u4JI6O2pTnOQW3euZu2Tds5tmpcGF8vIiOYu98P3D+g7IaE7Z3ApUN8xr+nJbghLFrdxTFVmvtSJB3S/ag00iOjWto6mTW5lII8jeEQERmuJi0RKJI22ZyR/Dshj4xSHw0RkeRpYIJI+qQ7cUtmZBSDjIz6mpmtBD4J/IuZXTPwC9x9gbs3uHtDZWVlSoNv1ooJIiJJcff4ajNK3ETSIt2JW2RHRvX2OS1tnZoKREQkCSs2bKO4MJ/Dxo0KOxSRnJTWwQnu3hPcJXsAyAN+1D8yCmh093uIj4y6LRgZtYl4che6l9ZvoXJsERXFhWGHIiISGXpMKpJeaZ+AN6ojo5paO5irx6QiIklpbuugrros7DBEclY2D04IlQYmiIgkL952amF5kXRR4rYfut0vIpKcrh27WdOxg2OqxoYdikjOUuI2iA1bd7FxWzczJpaEHYqISGQ819bB8VM096VIOum/rkE819bJ3OpyYjELOxQRkchobuvUkwqR
NFPiNoj4rN9lYYchIhIpzVoxQSTtlLgNQv3bRESS09vntKzq1Gh8kTRT4jZAd08fi9d2MWdqWdihiIhExrJXtzBxnOa+FEk3JW4DLF23meqKMYwdVRB2KCIikdGkJQJFMkKJ2wCav01EJHnqYiKSGUrcBmjW4sgiIklT2ymSGUrcBtCoKBGR5LRv2UXHtm6mVWruS5F0U+KWYG3nDrp7+qgZPybsUEREIqO5rUNzX4pkiBK3BE2tHdTVlGOmxkdEZLjUv00kc5S4JdDABBGR5KntFMkcJW4JnlPnWhGRpHT39LF03WZma+5LkYxQ4hbY0d3Li69t5fjJpWGHIiISGUvWdlEzvpiSovywQxEZEZS4BRat7uSow8cyqiAv7FBERCIj/pi0LOwwREYMJW4BzfotIpI8zd8mkllK3AIaFSUikhx3j99xq64IOxSREUOJG/HGp7mtkzrd7hcRGba1XTvp7YOpFaPDDkVkxFDiBqzcuJ1R+TGqStX4iIgMV1NrB3XVZZr7UiSDlLixd+JdEREZPnUxEck8JW5o8kgRkYOhtlMk85S4oatGEZFkbe/uYfn6rczS3JciGTXiE7fNO3ezqmM7x1aNCzsUEZHIWLiqi2OqNPelSKaN+MStpa2T4yeXUpA34v9RiEgWMbN5ZrbMzJab2XWD7C8yszuD/U+bWW1Qfp6ZNZnZ88Hfc9IRX7PmvhQJxYjPVjQwQUSyjZnlAbcAbwVmAleY2cwB1T4IdLj7dOCbwFeD8g3A29z9eOBK4LZ0xNistlMkFGlP3HTVKCKStJOA5e6+wt27gTuAiwfUuRj4abB9N3CumZm7P+fua4PyJcBoMytKZXDuHl9tRombSMalNXHL9qvG3j6npa1TV40ikm0mA6sS3q8Oygat4+49QBcwfkCddwHN7r4rlcGt2LCN4sJ8Dhs3KpUfKyLDkO47bll91fjia1uoHFtERXFhKj9WRCR0ZnYc8QvhD+9n/3wzazSzxvb29qQ+W9OAiIQn3YlbVl81Nrepj4aIZKU1wNSE91OCskHrmFk+UApsDN5PAX4LvM/dXx7sC9x9gbs3uHtDZWVlUsE9p8ekIqHJ+sEJ6b5qrFP/NhHJPs8CM8zsCDMrBC4H7hlQ5x7i3UgALgH+4u5uZmXAfcB17v54OoJT2ykSnnQnbll91aiJd0UkGwVPH64BHgBeAO5y9yVm9mUzuyio9kNgvJktBz4F9A/+ugaYDtxgZi3Ba2KqYuvasZs1HTs4pmpsqj5SRJKQn+bP33PVSDxBuxz4hwF1+q8anySDV40btu5i47ZuZkwsSfVHi4gcMne/H7h/QNkNCds7gUsHOe4/gf9MV1zPtXVw/BTNfSkSlrT+l5fNV43NrR3MrS4nFrNUfaSISM7TkwqRcKX7jlvWXjU2af42EZGkNbV18MEzjgg7DJERa8Te636utZO6mrKwwxARiYzePmfhqi7mTtVFr0hYRmTi1t3Tx+K1XcyZWhZ2KCIikbHs1S1MHFdEuea+FAnNiEzclq7bTHXFGMaOKgg7FBGRyFAXE5HwjcjETbN+i4gkTwMTRMI3IhM3NT4iIsnTRa9I+EZm4qblWkREktK+ZRed27uZVqm5L0XCNOISt+6ePi6aPYnqijFhhyIiEhm7enq59twZmvtSJGRpn8ct2xTmx7j+/GPDDkNEJFKmlI/hQ284MuwwREa8EXfHTURERCSqlLiJiIiIRIQSNxEREZGIUOImIiIiEhFK3EREREQiQombiIiISEQocRMRERGJCCVuIiIiIhGhxE1EREQkIszdw44hZcysHWgdZvUJwIY0hpMpuXIeoHPJRsmcR427V6YzGEmPJNrOXPldg84lG+XKecDwzyXpdjOnErdkmFmjuzeEHcehypXzAJ1LNsqV85DUyKXfg84l++TKeUB6z0WPSkVEREQiQombiIiISESM5MRtQdgBpEiunAfoXLJRrpyHpEYu/R50LtknV84D0nguI7aPm4iI
iEjUjOQ7biIiIiKREunEzcx+ZGbrzWxxQlmFmT1oZi8Ff8uDcjOzm8xsuZktMrO6hGOuDOq/ZGZXJpTXm9nzwTE3mZml6TymmtnDZrbUzJaY2ScifC6jzOwZM1sYnMuXgvIjzOzp4PvvNLPCoLwoeL882F+b8FnXB+XLzOwtCeXzgrLlZnZdOs4j4bvyzOw5M7s3yucRfN/K4DfQYmaNQVnkfmNy6NR2ZuW5qO3MzvPIvnbT3SP7As4E6oDFCWVfA64Ltq8Dvhpsnw/8ETDgFODpoLwCWBH8LQ+2y4N9zwR1LTj2rWk6jyqgLtgeC7wIzIzouRhQEmwXAE8H33sXcHlQ/j3gI8H2R4HvBduXA3cG2zOBhUARcATwMpAXvF4GjgQKgzoz0/gb+xTwC+De4H0kzyOIZSUwYUBZ5H5jeqXkt6C2M/vORW1ndp7HSrKs3Qy9AUnBP9Ra9m18lgFVwXYVsCzYvhW4YmA94Arg1oTyW4OyKuDvCeX71EvzOf0eOC/q5wKMAZqBk4lPRJgflJ8KPBBsPwCcGmznB/UMuB64PuGzHgiO23NsUL5PvRTHPwV4CDgHuDeIK3LnkfAdK3l9AxTp35heh/R7qEVtZ1aeC2o7s+I8gs9fSZa1m5F+VLofh7n7umD7VeCwYHsysCqh3uqg7EDlqwcpT6vgNvFc4ldbkTyX4BZ5C7AeeJD41VGnu/cM8v17Yg72dwHjSf4c0+FG4HNAX/B+PNE8j34O/D8zazKz+UFZJH9jkhaR/i2o7cyqNudGcqftzLp2Mz/ZM4gSd3cz87DjGC4zKwF+DXzS3TcnPuqO0rm4ey8wx8zKgN8Cx4QbUfLM7EJgvbs3mdlZIYeTKme4+xozmwg8aGZ/T9wZpd+YpFfUfgtqO7NHDradWddu5uIdt9fMrAog+Ls+KF8DTE2oNyUoO1D5lEHK08LMCog3PD93998ExZE8l37u3gk8TPzWdpmZ9V8oJH7/npiD/aXARpI/x1Q7HbjIzFYCdxC/5f+tCJ7HHu6+Jvi7nvj/FE4i4r8xSalI/hbUdmZdm5NTbWdWtpvpei6cqRev76fxdfbtNPi1YPsC9u00+ExQXgG8QrzDYHmwXRHsG9hp8Pw0nYMBPwNuHFAexXOpBMqC7dHAo8CFwK/Yt2PqR4Ptj7Fvx9S7gu3j2Ldj6grinVLzg+0j2Nsx9bg0/8bOYm8H20ieB1AMjE3YfgKYF8XfmF4p+03UorYzm85FbWeWnQdZ2m6G3ngc4j/UXwLrgN3Enw1/kPiz8YeAl4A/J/zDMeAW4n0GngcaEj7nA8Dy4PX+hPIGYHFwzM0EExan4TzOIP4cfRHQErzOj+i5nAA8F5zLYuCGoPzI4Ae6PPgPuCgoHxW8Xx7sPzLhs74QxLuMhJE2wT+bF4N9X8jA7+ws9jY+kTyPIO6FwWtJ//dF8TemV0p+D2o7s+9c1HZm2XmQpe2mVk4QERERiYhc7OMmIiIikpOUuImIiIhEhBI3ERERkYhQ4iYiIiISEUrcRERERCJCiZtkjJl90szGhB2HiEiUqO2URJoORDImmEm7wd03hB2LiEhUqO2URLrjJmlhZsVmdp+ZLTSzxWb2RWAS8LCZPRzUebOZPWlmzWb2q2C9QcxspZl9zcyeN7NnzGx6UH5p8FkLzeyR8M5ORCQ91HbKUJS4SbrMA9a6+2x3nwXcCKwFznb3s81sAvCvwJvcvQ5oBD6VcHyXux9PfCbpG4OyG4C3uPts4KLMnIaISEap7ZQDUuIm6fI8cJ6ZfdXM3uDuXQP2nwLMBB43sxbgSqAmYf8vE/6eGmw/DvzEzP6J+Jp1IiK5Rm2nHFB+2AFIbnL3F82sjviacv9pZg8NqGLAg+5+xf4+YuC2u19tZicTX8i3yczq3X1jqmMXEQmL2k4Ziu64SVqY2SRgu7vfDnwdqAO2AGODKk8Bpyf0wSg2s6MSPuKyhL9PBnWmufvT7n4D0A5MTf+ZiIhkjtpOGYru
uEm6HA983cz6gN3AR4jftv+Tma0N+mpcBfzSzIqCY/4VeDHYLjezRcAuoP/K8utmNoP4FedDwMLMnIqISMao7ZQD0nQgknU09F1EJHlqO0cGPSoVERERiQjdcRMRERGJCN1xExEREYkIJW4iIiIiEaHETURERCQilLiJiIiIRIQSNxEREZGIUOImIiIiEhFK3EREREQiQombiIiISEQocRMRERGJCCVuIiIiIhGhxE1EREQkIpS4iYiIiESEEjcRERGRiFDiJiIiIhIRStxEREREIkKJm4iIiEhEKHETERERiQglbiIiIiIRkR92AKk0YcIEr62tDTsMkRGpqalpg7tXhh2HJE9tp0g4DqbdzKnErba2lsbGxrDDEBmRzKw17Bjk4KjtFAnHwbSbelQqIiIiEhFK3EREREQiQombiIiISEQocRMRERGJCCVuIiIiIhGhxE1EREQkIpS4iYiIiESEEjcRERGRiBhxiVt3Tx//c/8LuHvYoYiIRMbqju384NEVYYchMuKNuMStMD/GPQvX0rpxe9ihiIhERlF+Hjc99BJ9fbroFQnTiEvcAOpqymlq7Qg7DBGRyKgcW0R5cSHL27eGHYrIiDYiE7f66nKa2pS4iYgko76mnMaVajtFwjQyE7eacpp1x01EJCn1elohEroRmbjNnDSOVZu207Vjd9ihiIgMm5mNMrNnzGyhmS0xsy8NUucqM2s3s5bg9aFUfX99TTnNelohEqoRmbgV5MWYNbmUllWdYYciIpKMXcA57j4bmAPMM7NTBql3p7vPCV4/SNWXz5g4lg1bd7Fx665UfaSIJGlEJm6gW/4iEj0e1z86oCB4ZWyYZ17MmFuttlMkTCM6cVM/NxGJGjPLM7MWYD3woLs/PUi1d5nZIjO728ympvL7NbhLJFwjNnGrqy6nZVUnvZqTSEQixN173X0OMAU4ycxmDajyB6DW3U8AHgR+OtjnmNl8M2s0s8b29vZhf78uekXCNWITt/LiQg4bV8SyV7eEHYqISNLcvRN4GJg3oHyju/d3QvsBUL+f4xe4e4O7N1RWVg77e+dUl7Fk7Wa6e/oOLnAROSQjNnGD/n5um8IOQ0RkWMys0szKgu3RwHnA3wfUqUp4exHwQipjKCnKp3Z8MYvXdqXyY0VkmJS46Za/iERHFfCwmS0CniXex+1eM/uymV0U1Lk2mCpkIXAtcFWqg9DjUpHw5IcdQJjqa8q5+eHlYYchIjIs7r4ImDtI+Q0J29cD16czjvqach5Y8iofekM6v0VEBjOi77gdOaGEzTt6WL95Z9ihiIhERn1NOY2tHbhrcJdIpo3oxC0WM+qqyzQTuIhIEqaUj8aA1R07wg5FZMQZ0YkbQENthfq5iYgkwczUR1gkJCM+caurjt/yFxGR4VPiJhKOEZ+4zZ5ayt/XbWHn7t6wQxERiYw6JW4ioQglcTOzUWb2jJktDIatf2mQOleZWbuZtQSvD6UjljGF+UyfWMLiNZqTSERkuGZNKmXlxm1s3dUTdigiI0pYd9x2Aee4+2xgDjDPzE4ZpN6d7j4neP0gXcHolr+ISHIK82McN2kcLW2dYYciMqKEkrh53NbgbUHwCm1cuW75i4gkT22nSOaF1sfNzPLMrAVYT3z276cHqfYuM1tkZneb2dR0xdJQU05zm+YkEhFJRn11OU2aTkkko0JL3Ny9193nAFOAk8xs1oAqfwBq3f0E4EHgp4N9jpnNN7NGM2tsb28/qFgmlY2mIC9G68btB3W8iMhIVF9TznNtHfT26aJXJFNCH1Xq7p3Aw8C8AeUb3X1X8PYHQP1+jl/g7g3u3lBZWXnQceiWv4hIcsaXFDGhpIiX1m8JOxSRESOsUaWVZlYWbI8GzgP+PqBOVcLbi4AX0hmTbvmLiCSvrloXvSKZFNYdtyrgYTNbBDxLvI/bvWb2ZTO7KKhzbTBVyELgWuCqdAZUX1NOsxofEZGkNNQqcRPJpPwwvtTdFwFzBym/IWH7
euD6TMU0c9I42jZtZ/PO3YwbVZCprxURibT6mnK+97eXww5DZMQIvY9btijIi3H85FLNSSQikoTplSV0bOumfcuuoSuLyCFT4pagvkbrloqIJCMWM+ZWx6dUEpH0U+KWQP3cRESS16BR+SIZo8QtwdzqclpWdWpOIhGRJGjZQJHMUeKWoKK4kInjilj2quYkEhEZrtlTy1i6djO7enrDDkUk5ylxG0DzuYmIJKe4KJ8jK4tZvGZz2KGI5DwlbgM01Kqfm4hIsuL93DaFHYZIzlPiNkB8ZKkaHxGRZGjZQJHMUOI2wJETSti8o4f1m3eGHYqISGTEByh04q7BXSLppMRtgFjMqKsu05xEIiJJmFw2mrwYrNq0I+xQRHKaErdBaGi7iEhyzIyGmgp1NRFJMyVug6ivqVDiJiKSJPVzE0k/JW6DmD21lBfWbWHnbs1JJCIyXHpaIZJ+StwGMaYwn+kTS1i8pivsUEREImNm1TjaNm1ny87dYYcikrOUuO2HrhxFRJJTmB9j1uRSnmvrDDsUkZylxG0/1FdDRCR5uugVSS8lbvtRX1NOc1uH5iQSkaxhZqPM7BkzW2hmS8zsS4PUKTKzO81suZk9bWa1mYyxvrpc0ymJpJESt/2YXDaagrwYbZu2hx2KiEi/XcA57j4bmAPMM7NTBtT5INDh7tOBbwJfzWSAdTXltLR10tuni16RdFDidgB1NeU0rtSVo4hkB4/bGrwtCF4DM6SLgZ8G23cD55qZZShEKooLqRxXxLJXt2TqK0VGFCVuB1BfXU6TbvmLSBYxszwzawHWAw+6+9MDqkwGVgG4ew/QBYzPZIxqO0XSR4nbAdTXlNOsTrYikkXcvdfd5wBTgJPMbNbBfI6ZzTezRjNrbG9vT2mMajtF0keJ2wHMnBSfk2iz5iQSkSzj7p3Aw8C8AbvWAFMBzCwfKAU2DnL8AndvcPeGysrKlMbWUKuRpSLposTtAAry4nMStWhOIhHJAmZWaWZlwfZo4Dzg7wOq3QNcGWxfAvzFMzw8/sgJJXTt2M36LTsz+bUiI4IStyE0aE4iEckeVcDDZrYIeJZ4H7d7zezLZnZRUOeHwHgzWw58Crgu00HGYkZddZkel4qkQX7YAWS7+ppyfvz4yrDDEBHB3RcBcwcpvyFheydwaSbjGkz/RLzzZlWFHYpITtEdtyHMrS6nZZXmJBIRSUZ9TYWeVoikgRK3IVQUFzJRcxKJiCRl9tRSXli3hZ27e8MORSSnKHEbBs1JJCKSnDGF+UyfWMLiNV1hhyKSU5S4DYPmJBIRSZ4WnBdJPSVuw6A5iUREkqfETST1QknczGyUmT1jZgvNbImZfWmQOkVmdqeZLTezp82sNoRQAc1JJCJyMPoTtwxPIyeS08K647YLOMfdZwNzgHlmdsqAOh8EOtx9OvBN4KuZDXEvzUkkIpK8SWWjKcyP0bpxe9ihiOSMUBI3j9savC0IXgMvyS4Gfhps3w2ca2aWoRBfR7f8RUSSV6e2UySlQuvjZmZ5ZtYCrCc++/fTA6pMBlYBuHsP0AWMz2iQCdT4iIgkr6GmnEa1nSIpE1ri5u697j4HmAKcZGazDuZzzGy+mTWaWWN7e3tKY0w0Z2qZ5iQSEUmSRuWLpFboo0rdvRN4GJg3YNcaYCqAmeUDpcDGQY5f4O4N7t5QWVmZtjj75yRaslZzEomIDNexVeNY3bGdrh27ww5FJCeENaq00szKgu3RwHnA3wdUuwe4Mti+BPiLhzw0qb6mnMaVunIUERmugrwYx08ppWVVZ9ihiOSEsO64VQEPm9ki4FnifdzuNbMvm9lFQZ0fAuPNbDnwKeC6kGLdQ/3cRESSV19TTtPKTWGHIZIT8sP4UndfBMwdpPyGhO2dwKWZjGso9TXlfPkPS3B3QhzgKiISKfU15fzwsVfCDkMkJ4Texy1KJpWOIj8Wo22T5iQSERmuuupyFq7qoqe3L+xQRCJPiVsSzIx6LX8lIpKUsjGF
HF46imWvbQk7FJHIU+KWpPpqJW4iIslS2ymSGkrckqQVFEREkqe2UyQ1lLglaeakcbRt2s7mnZqTSERkuDQqXyQ1lLglqSAvxqzJpbS0dYYdiohIZEyrLGbbrh5e27wz7FBEIk2J20HQLX8RkeSYGXXq5yZyyJS4HYSGmnKa29T4iIgkQ49LRQ6dEreDMLe6nJa2Tnr7Ql2BS0QkUvS0QuTQKXE7CBXFhVSOK2LZq5qTSERkuGZPKWPZq1vYubs37FBEIkuJ20Gqry6nSY9LRUSGbXRhHkcdVsKi1V1hhyISWUrcDlJ9TTnNuuUvIpIU9XMTOTRK3A6S+mqIiCRPbafIoVHidpCmVZbQtWM367doTiIRkeGqD0blu2twl8jBUOJ2kGIxo666jObWzrBDERGJjKrS0YwuyOOVDdvCDkUkkpS4HYL4Lf9NYYchIhIp6ucmcvCUuB0CNT4iIsmrry7TJOYiB0mJ2yGYPaWMF9ZpTiIRyQwzm2pmD5vZUjNbYmafGKTOWWbWZWYtweuGMGI9kIbaCl30ihyk/LADiLLionymTSxmydou6msqwg5HRHJfD/Bpd282s7FAk5k96O5LB9R71N0vDCG+YTnm8LGs6dhB1/bdlI4pCDsckUjRHbdD1FCjK0cRyQx3X+fuzcH2FuAFYHK4USUvPy/GCVPKaF6ltlMkWUrcDlFdTTmNK9X4iEhmmVktMBd4epDdp5rZQjP7o5kdl9nIhqehVpOYixwMJW6HSHMSiUimmVkJ8Gvgk+6+ecDuZqDG3WcD3wZ+t5/PmG9mjWbW2N7entZ4B6OLXpGDo8TtEE0qHUV+LEbbpu1hhyIiI4CZFRBP2n7u7r8ZuN/dN7v71mD7fqDAzCYMUm+Buze4e0NlZWXa4x6obmo5i1Z30tPbl/HvFokyJW6HyMy0hIuIZISZGfBD4AV3/8Z+6hwe1MPMTiLezm/MXJTDUzqmgEllo/n7q1vCDkUkUpS4pYDmcxORDDkdeC9wTsJ0H+eb2dVmdnVQ5xJgsZktBG4CLvcs7cvRUKu2UyRZmg4kBRpqyvlV46qwwxCRHOfujwE2RJ2bgZszE9Ghqasu55GXNnDlabVhhyISGbrjlgIzJ42jbdN2Nu/cHXYoIiKRUV+jkaUiyVLilgIFeTFmTS6lpa0z7FBERCLjiAnF7Njdy7quHWGHIhIZStxSRAMURESSY2bUVZfT3NoZdigikaHELUXqq8u1aLKISJLqa8ppbN0UdhgikaHELUXqasppaeukty8rB2+JiGQl9XMTSU4oiZuZTTWzh81sqZktMbNPDFLnLDPrShjyfkMYsQ5XRXEhleOKePE1zUkkIjJcJ0wp5cXXtrKjuzfsUEQiIaw7bj3Ap919JnAK8DEzmzlIvUfdfU7w+nJmQ0xefXU5jbpyFBEZtlEFeRx9+FgWre4MOxSRSAglcXP3de7eHGxvAV4AJocRSyrplr+ISPLi/dzUdooMR+h93MysFpgLPD3I7lPNbKGZ/dHMjstsZMnTyFIRkeTpoldk+EJN3MyshPhiyZ90980DdjcDNe4+G/g28Lv9fMZ8M2s0s8b29va0xjuUaZUldO3YzfotO0ONQ0QkSuprymlq6yBLV+YSySopSdzM7Awze3+wXWlmRwzjmALiSdvP3f03A/e7+2Z33xps3w8UmNmEQeotcPcGd2+orKw85HM5FLGYUVddpjmJRESScNi4UZQU5bNiw7awQxHJeoecuJnZF4HPA9cHRQXA7UMcY8APgRfc/Rv7qXN4UA8zOymIdeOhxptu9TWaz01EhsfMaszsTcH2aDMbG3ZMYamvKadppdpOkaGk4o7bO4CLgG0A7r4WGKrxOR14L3BOwnQf55vZ1WZ2dVDnEmCxmS0EbgIu9wjcR6+rKadxpSaTFJEDM7N/Au4Gbg2KprCfLiEjgfoIiwxPfgo+o9vd3cwcwMyKhzrA3R8DbIg6NwM3pyC+jJo9pYwX1m1h5+5eRhXk
hR2OiGSvjwEnEQzMcveXzGxiuCGFp76mnJ892Rp2GCJZLxV33O4ys1uBsuAK8s/A91PwuZFUXJTPtInFLFnbFXYoIpLddrl7d/8bM8sHsv6pQrocfdhYXu3aSef27qEri4xgh5y4ufv/Er/d/2vgaOAGd//2oX5ulNVX65a/iAzpb2b2L8BoMzsP+BXwh5BjCk1+XozZU0vVR1hkCKkYnFAM/MXdP0v8TtvoYMToiFVfW6HETUSGch3QDjwPfBi4H/jXUCMKmS56RYaWikeljwBFZjYZ+BPxQQc/ScHnRla8k22n5iQSkf1y9z53/767X+rulwTbI7rR0EWvyNBSkbiZu28H3gl8190vBbJ+lYN0mlQ6ivyY0bZpe9ihiEiWMrMZZna3mS01sxX9r7DjCtOcqWU8v7qL3b19YYcikrVSkriZ2anAe4D7grIRPZzSzDS0XUSG8mPgu0APcDbwM4aYAzPXlY4uYEr5GF5YN3AhHRHpl4rE7ZPEJ9/9rbsvMbMjgYdT8LmRVqfETUQObLS7P0T8qUWru/87cEHIMYVObafIgaViVOnf3P0id/9q8H6Fu1976KFFm+64icgQdplZDHjJzK4xs3cAJWEHFbYGtZ0iB5SKUaUNZvYbM2s2s0X9r1QEF2XHTRpH26btbNm5O+xQRCQ7fQIYA1wL1AP/CLwv1IiygC56RQ4sFSsn/Bz4LPEh7epRGijIizFrciktqzp5w4zKsMMRkezjwG1ADfE1niE+pdIJoUWUBWrGj6G7p4+1nTuYVDY67HBEsk4qErd2d78nBZ+Tc+prymlc2aHETUQGo4veQZjZnn5uStxEXi8VidsXzewHwEPArv5Cd/9NCj470uqry/npkyvDDkNEspMuevejv5/b22ZPCjsUkayTisTt/cAxxG/19181OjDiE7e6mnL++c4WevucvJiFHY6IZBdd9O5HfU05X/rD0rDDEMlKqUjcTnT3o1PwOTmnoriQyrFFvPjaFo6tGhd2OCKSXXTRux+zJpeyfP1Wtnf3MKYwFf+bEskdqfgv4gkzm+nuujwaRP8IKSVuIjKALnr3Y1RBHsdUjWXhqi5OnTY+7HBEssohTQdiZga8EWgxs2XBVCDPazqQveprymnW0HYReb0nzGxm2EFkq4aacprb1HaKDHRId9zc3c1sIjAjRfHknPqacr7z15fDDkNEss8pxC96XyHex82IN6v7nQ7EzKYSXxrrMOKPVRe4+7cG1DHgW8D5wHbgKndvTs8ppE99TTl3Prsq7DBEsk4qHpX+Gpjo7s+m4LNyzrTKEjq3d7N+y04mjh0Vdjgikj3mHcQxPcCn3b3ZzMYCTWb24ICuKm8lfjE9AziZ+HqoJx9ytBlWV1PO53/9PH19TkyDu0T2SMVapScDT5rZy3pU+nqxWHxOoubWzrBDEZEsEqxP+rrXEMes67975u5bgBeAyQOqXQz8zOOeAsrMrCotJ5FGE8eOonR0ASs2bA07FJGskoo7bm9JwWfktP6+GvNmHR52KCKSI8ysFpgLPD1g12Qg8Rnj6qBsXWYiS53+wV3TJ44NOxSRrJGKReaTvmocaeq09p6IpJCZlRDvpvJJd998kJ8x38wazayxvb09tQGmSF2w+oyI7JWKR6UyhNlTyli6djO7enrDDkVEIs7MCognbT/fz2S9a4CpCe+nBGX7cPcF7t7g7g2Vldm5LF99dTlNGlkqsg8lbhlQXJTPtInFLF7TFXYoIhJhwYjRHwIvuPs39lPtHuB9FncK0OXukXtMCnD04WNp37yLTdu6ww5FJGsoccuQ+mo9LhWRQ3Y68F7gHDNrCV7nm9nVZnZ1UOd+YAWwHPg+8NGQYj1keTFjTnUZz+mum8geWkskQ+pqyrn/+Uhe9IpIlnD3x4jP93agOg58LDMRpV9ddTmNrR2ce+xhYYcikhV0xy1DGmoraGrtJN6miojIcNRrcJfIPpS4Zcik0lHkx4xVm3aEHYqISGTMrS5j
8Zoudvf2hR2KSFZQ4pYhZkZ9TTmNrZvCDkVEJDLGjiqgumIMS9ce1KwnIjlHiVsGaT43EZHkxS961XaKgBK3jFJfDRGR5NXXlNOstlMEUOKWUTOrxtG2aTtbdu4OOxQRkchoqKmgsXWTBneJEFLiZmZTzexhM1tqZkvM7BOD1DEzu8nMlgeL19eFEWsqFebHmDW5lJZVnWGHIiISGVMrRtPbB2u7doYdikjowrrj1gN82t1nAqcAHzOzmQPqvBWYEbzmA9/NbIjpocelIiLJiQ/uKqNxpQZ3iYSSuLn7OndvDra3AC8AkwdUuxj4mcc9BZSZWVWGQ005raAgIpI89XMTiQu9j5uZ1QJzgacH7JoMrEp4v5rXJ3eRU1dTTktbJ7196qshIjJc9TUVWnBehJATNzMrAX4NfNLdD2qSHjObb2aNZtbY3t6e2gDToKK4kMqxRbz42pawQxERiYxZk8fx8vptbNvVE3YoIqEKLXEzswLiSdvP3f03g1RZA0xNeD8lKNuHuy9w9wZ3b6isrExPsCmm+dxERJJTlJ/HzEnjWKjBXTLChTWq1IAfAi+4+zf2U+0e4H3B6NJTgC53z4lV2hvUV0NEJGka3CUS3h2304H3AueYWUvwOt/Mrjazq4M69wMrgOXA94GPhhRrytXXlKuvhohIktR2ikB+GF/q7o8BNkQdBz6WmYgya1plCR3bumnfsovKsUVhhyMiEgl11eV89lcL6etzYrED/i9EJGeFPqp0JIrFTP3cRESSVDm2iPLiQpa3bw07FJHQKHELSX11Oc265S8ikhT1c5ORTolbSOpr1fiIiCRLiZuMdErcQjJ7ShlL125mV09v2KGIiESGEjcZ6ZS4haS4KJ9pE4tZvOag5h0WERmRZkwcy4atu9i4dVfYoYiEQolbiOLrlmrRZBGR4cqLGXOry2lu6ww7FJFQKHELkUaWiogkL37Rq7ZTRiYlbiGK99XoJD5lnYiIDEe87dTTChmZlLiFaHLZaPJjxqpNO8IORUQkMuZUl7Fk7Wa6e/rCDkUk45S4hcjMgiVcdOUoIjJcJUX51I4vZsnarrBDEck4JW4hUz83EZHkaVoQGamUuIWsvqacxpVqfEREkqHETUYqJW4hm1k1jrZN29myc3fYoYiIREZ9TTmNrR0a3CUjjhK3kBXmx5g1qZSWVZ1hhyIiEhlTykdjwOoODe6SkUWJWxbQuqUiIsnpH9zV3Ka2U0YWJW5ZQJNJiogkT32EZSRS4pYF6mrKaVnVSW+f+mqIyP6Z2Y/MbL2ZLd7P/rPMrMvMWoLXDZmOMZM0Kl9GIiVuWaCiuJDKkiJefG1L2KGISHb7CTBviDqPuvuc4PXlDMQUmlmTSlm5cRtbd/WEHYpIxihxyxK6chSRobj7I4Bm7A4U5sc4btI4Fmpwl4wgStyyRH1NOc1K3ETk0J1qZgvN7I9mdlzYwaRbnfq5yQijxC1LNNSU06TRUSJyaJqBGnefDXwb+N3+KprZfDNrNLPG9vb2TMWXcg01FWo7ZURR4pYlplWW0LGtm/Ytu8IORUQiyt03u/vWYPt+oMDMJuyn7gJ3b3D3hsrKyozGmUp11WU819ZBnwZ3yQihxC1LxGJGneYkEpFDYGaHm5kF2ycRb+M3hhtVeo0vKWJCSREvrtfgLhkZlLhlEc3nJiIHYma/BJ4Ejjaz1Wb2QTO72syuDqpcAiw2s4XATcDlPgLWhKpT2ykjSH7YAche9TXl/N+DL4YdhohkKXe/Yoj9NwM3ZyicrNFQW86zKzfxnpNrwg5FJO10xy2LzJ5axtK1m9nV0xt2KCIikaFR+TKSKHHLIsVF+UybWMziNZvDDkVEJDKmV5awSYO7ZIRQ4pZl6qt15SgikoxYzJhbrcFdMjIoccsyWkFBRCR5DXpcKiOEErcsU19TTmNrByNgIJiISMrU66JXRgglbllmctlo8mKwatOOsEMREYmM2VPLWKLBXTIChJK4mdmPzGy9mS3e
z/6zzKzLzFqC1w2ZjjEsZhYs4aJ1pEVEhqu4KJ8jKzW4S3JfWHfcfgLMG6LOo+4+J3h9OQMxZQ31cxMRSZ76uclIEEri5u6PALqltB/xvhqdYYchIhIpuuiVkSCb+7idamYLzeyPZnZc2MFk0syqcbRu3MaWnbvDDkVEJDI0uEtGgmxN3JqBGnefDXwb+N3+KprZfDNrNLPG9vb2TMWXVoX5MWZNKqVlVWfYoYiIRIYGd8lIkJWJm7tvdvetwfb9QIGZTdhP3QXu3uDuDZWVlRmNM510y19EJDka3CUjQVYmbmZ2uJlZsH0S8Tg3hhtVZjUocRMRSZoueiXXhTUdyC+BJ4GjzWy1mX3QzK42s6uDKpcAi81sIXATcLmPsE4LdTXltKzqpLdvRJ22iMghqa8pp3GlEjfJXflhfKm7XzHE/puBmzMUTlaqKC6ksqSIl9Zv4ZjDx4UdjohIJMysGkfbpu1s2bmbsaMKwg5HJOWy8lGpxNXpylFEJCmF+TFmTdbgLsldStyyWL0mkxQRSZrWLZVcpsQti9XXlNPUpsZHRCQZ9dVK3CR3KXHLYtMrS+jY1k37ll1hhyIiEhl1NeW0tGlwl+QmJW5ZLBYz6mrKadZdNxGRYasoLqRyXBEvvrYl7FBEUk6JW5arr1Y/NxGRZOlxqeQqJW5Zrn/tPRERGT4NUJBcpcQty82eWsbStZvZ1dMbdigiIpHRUKvETXKTErcsV1yUz5GVxSxesznsUEREIuPICSV07djN+i07ww5FJKWUuEVAg+ZzExFJSixm1FWXqe2UnKPELQK0aLKISPLUz01ykRK3COifiNddcxKJiAxXfU2FEjfJOUrcImBy2WhiBqs27Qg7FBGRyJg9tZQX1m1h524N7pLcocQtAswsuOu2KexQREQiY0xhPtMnlrB4TVfYoYikjBK3iKjTZJIiIklTPzfJNUrcIqKhtoKm1s6wwxCREJnZj8xsvZkt3s9+M7ObzGy5mS0ys7pMx5htlLhJrlHiFhEzq8bRunEbW3buDjsUEQnPT4B5B9j/VmBG8JoPfDcDMWW1+mC9Zw3uklyhxC0iCvNjzJpUysJV6qshMlK5+yPAgTq7Xgz8zOOeAsrMrCoz0WWnSWWjKciL0bpxe9ihiKSEErcIqaspp7FVAxREZL8mA6sS3q8OykY0zYUpuUSJW4Sor4aIpIqZzTezRjNrbG9vDzuctGoI5sIUyQVK3CKkvqacllWd9Papr4aIDGoNMDXh/ZSg7HXcfYG7N7h7Q2VlZUaCC0u9lg2UHKLELUIqigupLCnipfVbwg5FRLLTPcD7gtGlpwBd7r4u7KDCdmzVOFZt2k7XDg3ukuhT4hYx6qshMnKZ2S+BJ4GjzWy1mX3QzK42s6uDKvcDK4DlwPeBj4YUalYpyItx/JRSWlZ1hh2KyCHLDzsASU59TTnPrtzEe06uCTsUEckwd79iiP0OfCxD4URKfx/hNx6V24+FJffpjlvEaICCiEjy1M9NcoUSt4iZXllCx7Zu2rfsCjsUEZHIqKuOD+7q6e0LOxSRQ6LELWJiMaMumAlcRESGp2xMIYeXjmLZaxrcJdGmxC2C6qt1y19EJFlqOyUXKHGLIPVzExFJntpOyQVK3CJo9tQylq7bzK6e3rBDERGJjPiygUrcJNqUuEVQcVE+R0woZvGazWGHIiISGdMqi9m2q4fXNu8MOxSRgxZa4mZmPzKz9Wa2eD/7zcxuMrPlZrbIzOoyHWM209B2EZHkmBl16ucmERfmHbefAPMOsP+twIzgNR/4bgZiigz11RARSZ5Wn5GoCy1xc/dHgE0HqHIx8DOPewooM7OqzESX/eprymlq6yA+UbqIiAxHvfq5ScRlcx+3ycCqhPergzIBJpeNJmawumNH2KGIiETG7CllLHt1Czt3a3CXRFM2J27DYmbzzazRzBrb29vDDidjzCy4cjzQTUsREUk0ujCPow4r4fk1XWGHInJQsnmR+TXA1IT3
U4Kyfbj7AmABQENDw4h6blhXXc59i15lbFEB+XlGYV6M/LwY+XlGQSz4m2fk79mOkR8z8vNie8oL8gwzC/tUREQypq6mnP+87wWmVRZTmBejIHgV5scoDNrKgvygLHhfmJ9Yz/ZsF+TFKNqzb2Dd4H1ejFhM7aykRjYnbvcA15jZHcDJQJe7rws5pqxy3szDaFzZwS+eaWN3bx89vc7u3j529zk9/e/74n979lfe5+TFjPxYkNglJHT9CeCe8rwYBTHbbxK4b8K4n/39n5/f/1l79+fFAIyYxe8oGhCLgWFgEOsvM8PiRfF6wXYsZkHZ3uPNgs8jqBds93+uGcTb0/7tfb+DAZ/bvz8xnn2+I7Y3rqDans8JjiQxT07ct3e7f58lbO8tE5FD85E3TuPJFRvp7uljd3+72dtHd28fu3ucXT19bNnVE+zvf3mwf9+63b0D6vT07S3r2XtcfxtbkGcDksB9E8T+pLFwQL196uTHk8H4fgvaq3g7FAv+MuC9JbZx+/m7T9trQTuZ+N72tqOJ7/uPG9h+DvrZg8UUG9BOA9i+beae9n7P+70N5mD747sS2lV7fTu7z+dFqG0NLXEzs18CZwETzGw18EWgAMDdvwfcD5wPLAe2A+8PJ9LsVTO+mO+9t/6QPsM9nrztL8nb3ev09O1NCnv6fE+S2NMX7N9ne/AEcXdvHzt2B+UJn7E3geyjt89xBw/i6t/uC7b7goEY/dt7/gIkbLs7fa/7HKevb5AyD94Hn9u/3f/5g5X1BV/o+3xnEF9inEF5UHtP7HtuCyfs21tv77+XvduD/7sb2AD1b/fvS2zY9tbr3z5woti/MaGkiIc/c9bgAYhE1MRxo7h4Tua6TPe3s7v7E8Pe3njCmJgE9ieQPX3sSkj69u6Pl3UnJIjbguSyL6Hd6XMPXnvbr8Hex9u0/nZtb3u2p01MeL/PZ/cN0tYO/EtCvYS2dm+7eoD39Ld5+7aZA9vpPTUG2R8v3vv/EPz17ezA+omGTBTZNxF8XRtqcOaMSm55T3pmMQstcXP3K4bY78DHMhTOiGVmwe18GE1e2OHIEPY0WAMaoP7t/n2JiSIJ5fF6w0wUnb0tkogctL3tbAwKIbhHIVkksW19XaI4oD11H5AYDqiPQ15e+hrPbH5UKiID7Lmztk+boOxKRORQvL5tzd52NfKjSkVERERGCiVuIiIiIhGhxE1EREQkIpS4iYiIiESEEjcRERGRiFDiJiIiIhIRStxEREREIkKJm4iIiEhEKHETERERiQjzwRbqiigzawdah1l9ArAhjeFkSq6cB+hcslEy51Hj7pXpDEbSI4m2M1d+16BzyUa5ch4w/HNJut3MqcQtGWbW6O4NYcdxqHLlPEDnko1y5TwkNXLp96BzyT65ch6Q3nPRo1IRERGRiFDiJiIiIhIRIzlxWxB2ACmSK+cBOpdslCvnIamRS78HnUv2yZXzgDSey4jt4yYiIiISNSP5jpuIiIhIpEQ6cTOzH5nZejNbnFBWYWYPmtlLwd/yoNzM7CYzW25mi8ysLuGYK4P6L5nZlQnl9Wb2fHDMTWZmaTqPqWb2sJktNbMlZvaJCJ/LKDN7xswWBufypaD8CDN7Ovj+O82sMCgvCt4vD/bXJnzW9UH5MjN7S0L5vKBsuZldl47zSPiuPDN7zszujfJ5BN+3MvgNtJhZY1AWud+YHDq1nVl5Lmo7s/M8sq/ddPfIvoAzgTpgcULZ14Drgu3rgK8G2+cDfwQMOAV4OiivAFYEf8uD7fJg3zNBXQuOfWuazqMKqAu2xwIvAjMjei4GlATbBcDTwffeBVwelH8P+Eiw/VHge8H25cCdwfZMYCFQBBwBvAzkBa+XgSOBwqDOzDT+xj4F/AK4N3gfyfMIYlkJTBhQFrnfmF4p+S2o7cy+c1HbmZ3nsZIsazdDb0BS8A+1ln0bn2VAVbBdBSwLtm8FrhhYD7gCuDWh/NagrAr4e0L5PvXSfE6/B86L+rkA
Y4Bm4GTiExHmB+WnAg8E2w8Apwbb+UE9A64Hrk/4rAeC4/YcG5TvUy/F8U8BHgLOAe4N4orceSR8x0pe3wBF+jem1yH9HmpR25mV54Lazqw4j+DzV5Jl7WakH5Xux2Huvi7YfhU4LNieDKxKqLc6KDtQ+epBytMquE08l/jVViTPJbhF3gKsBx4kfnXU6e49g3z/npiD/V3AeJI/x3S4Efgc0Be8H080z6OfA//PzJrMbH5QFsnfmKRFpH8Lajuzqs25kdxpO7Ou3cxP9gyixN3dzDzsOIbLzEqAXwOfdPfNiY+6o3Qu7t4LzDGzMuC3wDHhRpQ8M7sQWO/uTWZ2VsjhpMoZ7r7GzCYCD5rZ3xN3Ruk3JukVtd+C2s7skYNtZ9a1m7l4x+01M6sCCP6uD8rXAFMT6k0Jyg5UPmWQ8rQwswLiDc/P3f03QXEkz6Wfu3cCDxO/tV1mZv0XConfvyfmYH8psJHkzzHVTgcuMrOVwB3Eb/l/K4LnsYe7rwn+rif+P4WTiPhvTFIqkr8FtZ1Z1+bkVNuZle1mup4LZ+rF6/tpfJ19Ow1+Ldi+gH07DT4TlFcArxDvMFgebFcE+wZ2Gjw/TedgwM+AGweUR/FcKoGyYHs08ChwIfAr9u2Y+tFg+2Ps2zH1rmD7OPbtmLqCeKfU/GD7CPZ2TD0uzb+xs9jbwTaS5wEUA2MTtp8A5kXxN6ZXyn4TtajtzKZzUduZZedBlraboTceh/gP9ZfAOmA38WfDHyT+bPwh4CXgzwn/cAy4hXifgeeBhoTP+QCwPHi9P6G8AVgcHHMzwYTFaTiPM4g/R18EtASv8yN6LicAzwXnshi4ISg/MviBLg/+Ay4KykcF75cH+49M+KwvBPEuI2GkTfDP5sVg3xcy8Ds7i72NTyTPI4h7YfBa0v99UfyN6ZWS34Pazuw7F7WdWXYeZGm7qZUTRERERCIiF/u4iYiIiOQkJW4iIiIiEaHETURERCQilLiJiIiIRIQSNxEREZGIUOImGWNmnzSzMWHHISISJWo7JZGmA5GMCWbSbnD3DWHHIiISFWo7JZHuuElamFmxmd1nZgvNbLGZfRGYBDxsZg8Hdd5sZk+aWbOZ/SpYbxAzW2lmXzOz583sGTObHpRfGnzWQjN7JLyzExFJD7WdMhQlbpIu84C17j7b3WcBNwJrgbPd/WwzmwD8K/Amd68DGoFPJRzf5e7HE59J+sag7AbgLe4+G7goM6chIpJRajvlgJS4Sbo8D5xnZl81sze4e9eA/acAM4HHzawFuBKoSdj/y4S/pwbbjwM/MbN/Ir5mnYhIrlHbKQeUH3YAkpvc/UUzqyO+ptx/mtlDA6oY8KC7X7G/jxi47e5Xm9nJxBfybTKzenffmOrYRUTCorZThqI7bpIWZjYJ2O7utwNfB+qALcDYoMpTwOkJfTCKzeyohI+4LOHvk0Gdae7+tLvfALQDU9N/JiIimaO2U4aiO26SLscDXzezPmA38BHit+3/ZGZrg74aVwG/NLOi4Jh/BV4MtsvNbBGwC+i/svy6mc0gfsX5ELAwM6ciIpIxajvlgDQdiGQdDX0XEUme2s6RQY9KRURERCJCd9xEREREIkJ33EREREQiQombiIiISEQocRMRERGJCCVuIiIiIhGhxE1EREQkIpS4iYiIiETE/wfMLUbmBjr+zQAAAABJRU5ErkJggg==\n"
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "if EVALUATE_WHILE_TRAINING:\n",
    "    logs = evaluation_logger.get_log()\n",
    "    for i, (m, v) in enumerate(logs.items(), 1):\n",
    "        sb.glue(\"eval_{}\".format(m), v)\n",
    "        x = [save_checkpoints_steps*i for i in range(1, len(v)+1)]\n",
    "        plot.line_graph(\n",
    "            values=list(zip(v, x)),\n",
    "            labels=m,\n",
    "            x_name=\"steps\",\n",
    "            y_name=m,\n",
    "            subplot=(math.ceil(len(logs)/2), 2, i),\n",
    "        )"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### 3.2 TensorBoard\n",
    "\n",
    "Once the train is done, you can browse the details of the training results as well as the metrics we logged from [TensorBoard](https://www.tensorflow.org/guide/summaries_and_tensorboard).\n",
    "[]()|[]()|[]()\n",
    ":---:|:---:|:---:\n",
    "<img src=\"https://recodatasets.z20.web.core.windows.net/images/tensorboard_0.png?sanitize=true\"> |  <img src=\"https://recodatasets.z20.web.core.windows.net/images/tensorboard_1.png?sanitize=true\"> | <img src=\"https://recodatasets.z20.web.core.windows.net/images/tensorboard_2.png?sanitize=true\">\n",
    "\n",
    "To open the TensorBoard, open a terminal from the same directory of this notebook, run `tensorboard --logdir=model_checkpoints`, and open http://localhost:6006 from a browser.\n",
    "\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 4. Test and Export Model\n",
    "\n",
    "#### 4.1 Item rating prediction"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 32,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "INFO:tensorflow:Calling model_fn.\n",
      "INFO:tensorflow:Done calling model_fn.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/usr/local/anaconda/anaconda3/envs/DeepLearning/lib/python3.8/site-packages/keras/engine/base_layer_v1.py:1684: UserWarning: `layer.add_variable` is deprecated and will be removed in a future version. Please use `layer.add_weight` method instead.\n",
      "  warnings.warn('`layer.add_variable` is deprecated and '\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "INFO:tensorflow:Graph was finalized.\n",
      "INFO:tensorflow:Restoring parameters from /var/folders/0g/hn4z3dz916b61gmv11shxdf80000gn/T/tmpj3p39lfp/model.ckpt-50000\n",
      "INFO:tensorflow:Running local_init_op.\n",
      "INFO:tensorflow:Done running local_init_op.\n"
     ]
    },
    {
     "data": {
      "application/scrapbook.scrap.json+json": {
       "name": "rmse",
       "data": 0.9526254346330766,
       "encoder": "json",
       "version": 1
      }
     },
     "metadata": {
      "scrapbook": {
       "name": "rmse",
       "data": true,
       "display": false
      }
     },
     "output_type": "display_data"
    },
    {
     "data": {
      "application/scrapbook.scrap.json+json": {
       "name": "mae",
       "data": 0.7576763784003258,
       "encoder": "json",
       "version": 1
      }
     },
     "metadata": {
      "scrapbook": {
       "name": "mae",
       "data": true,
       "display": false
      }
     },
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "{'rmse': 0.9526254346330766, 'mae': 0.7576763784003258}\n"
     ]
    }
   ],
   "source": [
    "if len(RATING_METRICS) > 0:\n",
    "    predictions = list(model.predict(input_fn=tf_utils.pandas_input_fn(df=test)))\n",
    "    prediction_df = test.drop(RATING_COL, axis=1)\n",
    "    prediction_df[PREDICT_COL] = [p['predictions'][0] for p in predictions]\n",
    "    \n",
    "    rating_results = {}\n",
    "    for m in RATING_METRICS:\n",
    "        result = evaluator.metrics[m](test, prediction_df, **cols)\n",
    "        sb.glue(m, result)\n",
    "        rating_results[m] = result\n",
    "    print(rating_results)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### 4.2 Recommend k items\n",
    "For top-k recommendation evaluation, we use the ranking pool (all the user-item pairs) we prepared at the [training step](#ranking-pool). The difference is we remove users' seen items from the pool in this step which is more natural to the movie recommendation scenario."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 38,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/usr/local/anaconda/anaconda3/envs/DeepLearning/lib/python3.8/site-packages/keras/engine/base_layer_v1.py:1684: UserWarning: `layer.add_variable` is deprecated and will be removed in a future version. Please use `layer.add_weight` method instead.\n",
      "  warnings.warn('`layer.add_variable` is deprecated and '\n"
     ]
    },
    {
     "data": {
      "application/scrapbook.scrap.json+json": {
       "name": "ndcg_at_k",
       "data": 0.035714114298555755,
       "encoder": "json",
       "version": 1
      }
     },
     "metadata": {
      "scrapbook": {
       "name": "ndcg_at_k",
       "data": true,
       "display": false
      }
     },
     "output_type": "display_data"
    },
    {
     "data": {
      "application/scrapbook.scrap.json+json": {
       "name": "precision_at_k",
       "data": 0.03244962884411453,
       "encoder": "json",
       "version": 1
      }
     },
     "metadata": {
      "scrapbook": {
       "name": "precision_at_k",
       "data": true,
       "display": false
      }
     },
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "{'ndcg_at_k': 0.035714114298555755, 'precision_at_k': 0.03244962884411453}\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "IOPub data rate exceeded.\n",
      "The notebook server will temporarily stop sending output\n",
      "to the client in order to avoid crashing it.\n",
      "To change this limit, set the config variable\n",
      "`--NotebookApp.iopub_data_rate_limit`.\n",
      "\n",
      "Current values:\n",
      "NotebookApp.iopub_data_rate_limit=1000000.0 (bytes/sec)\n",
      "NotebookApp.rate_limit_window=3.0 (secs)\n",
      "\n"
     ]
    }
   ],
   "source": [
    "if len(RANKING_METRICS) > 0:\n",
    "    predictions = list(model.predict(input_fn=tf_utils.pandas_input_fn(df=ranking_pool)))\n",
    "    prediction_df = ranking_pool.copy()\n",
    "    prediction_df[PREDICT_COL] = [p['predictions'][0] for p in predictions]\n",
    "\n",
    "    # sort the results of the prediction\n",
    "    ranking_results = {}\n",
    "    for m in RANKING_METRICS:\n",
    "        result = evaluator.metrics[m](test, prediction_df, **{**cols, 'k': TOP_K})\n",
    "        sb.glue(m, result)\n",
    "        ranking_results[m] = result\n",
    "    print(ranking_results)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 50,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "         userID  itemID  \\\n",
      "0           626    1422   \n",
      "1           818     219   \n",
      "2            75    1639   \n",
      "3           352    1491   \n",
      "4            68    1639   \n",
      "...         ...     ...   \n",
      "1511121     179     811   \n",
      "1511122     225    1395   \n",
      "1511123     866    1054   \n",
      "1511124     665     916   \n",
      "1511125     473     636   \n",
      "\n",
      "                                                            genres  \n",
      "0        [0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0]  \n",
      "1        [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0]  \n",
      "2        [0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]  \n",
      "3        [1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0]  \n",
      "4        [0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]  \n",
      "...                                                            ...  \n",
      "1511121  [0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]  \n",
      "1511122  [0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]  \n",
      "1511123  [0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]  \n",
      "1511124  [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0]  \n",
      "1511125  [1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0]  \n",
      "\n",
      "[1511126 rows x 3 columns]\n",
      "         userID  itemID  \\\n",
      "0           626    1422   \n",
      "1           818     219   \n",
      "2            75    1639   \n",
      "3           352    1491   \n",
      "4            68    1639   \n",
      "...         ...     ...   \n",
      "1511121     179     811   \n",
      "1511122     225    1395   \n",
      "1511123     866    1054   \n",
      "1511124     665     916   \n",
      "1511125     473     636   \n",
      "\n",
      "                                                            genres  prediction  \n",
      "0        [0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0]    0.035233  \n",
      "1        [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0]    0.010089  \n",
      "2        [0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]   -0.052412  \n",
      "3        [1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0]    0.010807  \n",
      "4        [0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]   -0.032619  \n",
      "...                                                            ...         ...  \n",
      "1511121  [0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]   -0.014447  \n",
      "1511122  [0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]   -0.000089  \n",
      "1511123  [0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]    0.001887  \n",
      "1511124  [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0]   -0.025082  \n",
      "1511125  [1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0]   -0.053500  \n",
      "\n",
      "[1511126 rows x 4 columns]\n"
     ]
    }
   ],
   "source": [
    "print(ranking_pool)\n",
    "print(prediction_df)"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### 4.3 Export Model\n",
    "Finally, we export the model so that we can load later for re-training, evaluation, and prediction.\n",
    "Examples of how to load, re-train, and evaluate the saved model can be found from [azureml_hyperdrive_wide_and_deep.ipynb](../04_model_select_and_optimize/azureml_hyperdrive_wide_and_deep.ipynb) notebook."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 34,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "os.makedirs(EXPORT_DIR_BASE, exist_ok=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 35,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/usr/local/anaconda/anaconda3/envs/DeepLearning/lib/python3.8/site-packages/keras/engine/base_layer_v1.py:1684: UserWarning: `layer.add_variable` is deprecated and will be removed in a future version. Please use `layer.add_weight` method instead.\n",
      "  warnings.warn('`layer.add_variable` is deprecated and '\n"
     ]
    },
    {
     "data": {
      "application/scrapbook.scrap.text+json": {
       "name": "saved_model_dir",
       "data": "./outputs/model/1651247093",
       "encoder": "text",
       "version": 1
      }
     },
     "metadata": {
      "scrapbook": {
       "name": "saved_model_dir",
       "data": true,
       "display": false
      }
     },
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model exported to ./outputs/model/1651247093\n"
     ]
    }
   ],
   "source": [
    "exported_path = tf_utils.export_model(\n",
    "    model=model,\n",
    "    train_input_fn=train_fn,\n",
    "    eval_input_fn=tf_utils.pandas_input_fn(\n",
    "        df=test, y_col=RATING_COL\n",
    "    ),\n",
    "    tf_feat_cols=wide_columns+deep_columns,\n",
    "    base_dir=EXPORT_DIR_BASE\n",
    ")\n",
    "sb.glue('saved_model_dir', str(exported_path))\n",
    "print(\"Model exported to\", str(exported_path))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 36,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Close the event file so that the model folder can be cleaned up.\n",
    "summary_writer = tf.compat.v1.summary.FileWriterCache.get(model.model_dir)\n",
    "summary_writer.close()\n",
    "\n",
    "# Cleanup temporary directory if used\n",
    "if TMP_DIR is not None:\n",
    "    TMP_DIR.cleanup()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 37,
   "metadata": {
    "collapsed": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\r\n",
      "MetaGraphDef with tag-set: 'eval' contains the following SignatureDefs:\r\n",
      "\r\n",
      "signature_def['eval']:\r\n",
      "  The given SavedModel SignatureDef contains the following input(s):\r\n",
      "    inputs['genres'] tensor_info:\r\n",
      "        dtype: DT_FLOAT\r\n",
      "        shape: (-1, 19)\r\n",
      "        name: IteratorGetNext_2:0\r\n",
      "    inputs['itemID'] tensor_info:\r\n",
      "        dtype: DT_INT64\r\n",
      "        shape: (-1)\r\n",
      "        name: IteratorGetNext_1:0\r\n",
      "    inputs['label'] tensor_info:\r\n",
      "        dtype: DT_DOUBLE\r\n",
      "        shape: (-1)\r\n",
      "        name: IteratorGetNext_3:0\r\n",
      "    inputs['userID'] tensor_info:\r\n",
      "        dtype: DT_INT64\r\n",
      "        shape: (-1)\r\n",
      "        name: IteratorGetNext:0\r\n",
      "  The given SavedModel SignatureDef contains the following output(s):\r\n",
      "    outputs['loss'] tensor_info:\r\n",
      "        dtype: DT_FLOAT\r\n",
      "        shape: ()\r\n",
      "        name: head/weighted_loss/Sum:0\r\n",
      "    outputs['metrics/average_loss/update_op'] tensor_info:\r\n",
      "        dtype: DT_FLOAT\r\n",
      "        shape: ()\r\n",
      "        name: mean/update_op:0\r\n",
      "    outputs['metrics/average_loss/value'] tensor_info:\r\n",
      "        dtype: DT_FLOAT\r\n",
      "        shape: ()\r\n",
      "        name: mean/value:0\r\n",
      "    outputs['metrics/label/mean/update_op'] tensor_info:\r\n",
      "        dtype: DT_FLOAT\r\n",
      "        shape: ()\r\n",
      "        name: mean_1/update_op:0\r\n",
      "    outputs['metrics/label/mean/value'] tensor_info:\r\n",
      "        dtype: DT_FLOAT\r\n",
      "        shape: ()\r\n",
      "        name: mean_1/value:0\r\n",
      "    outputs['metrics/prediction/mean/update_op'] tensor_info:\r\n",
      "        dtype: DT_FLOAT\r\n",
      "        shape: ()\r\n",
      "        name: prediction/mean/update_op:0\r\n",
      "    outputs['metrics/prediction/mean/value'] tensor_info:\r\n",
      "        dtype: DT_FLOAT\r\n",
      "        shape: ()\r\n",
      "        name: prediction/mean/value:0\r\n",
      "    outputs['predictions'] tensor_info:\r\n",
      "        dtype: DT_FLOAT\r\n",
      "        shape: (-1, 1)\r\n",
      "        name: add:0\r\n",
      "  Method name is: tensorflow/supervised/eval\r\n",
      "\r\n",
      "MetaGraphDef with tag-set: 'serve' contains the following SignatureDefs:\r\n",
      "\r\n",
      "signature_def['predict']:\r\n",
      "  The given SavedModel SignatureDef contains the following input(s):\r\n",
      "    inputs['examples'] tensor_info:\r\n",
      "        dtype: DT_STRING\r\n",
      "        shape: (-1)\r\n",
      "        name: input_example_tensor:0\r\n",
      "  The given SavedModel SignatureDef contains the following output(s):\r\n",
      "    outputs['predictions'] tensor_info:\r\n",
      "        dtype: DT_FLOAT\r\n",
      "        shape: (-1, 1)\r\n",
      "        name: add:0\r\n",
      "  Method name is: tensorflow/serving/predict\r\n",
      "\r\n",
      "signature_def['regression']:\r\n",
      "  The given SavedModel SignatureDef contains the following input(s):\r\n",
      "    inputs['inputs'] tensor_info:\r\n",
      "        dtype: DT_STRING\r\n",
      "        shape: (-1)\r\n",
      "        name: input_example_tensor:0\r\n",
      "  The given SavedModel SignatureDef contains the following output(s):\r\n",
      "    outputs['outputs'] tensor_info:\r\n",
      "        dtype: DT_FLOAT\r\n",
      "        shape: (-1, 1)\r\n",
      "        name: add:0\r\n",
      "  Method name is: tensorflow/serving/regress\r\n",
      "\r\n",
      "signature_def['serving_default']:\r\n",
      "  The given SavedModel SignatureDef contains the following input(s):\r\n",
      "    inputs['inputs'] tensor_info:\r\n",
      "        dtype: DT_STRING\r\n",
      "        shape: (-1)\r\n",
      "        name: input_example_tensor:0\r\n",
      "  The given SavedModel SignatureDef contains the following output(s):\r\n",
      "    outputs['outputs'] tensor_info:\r\n",
      "        dtype: DT_FLOAT\r\n",
      "        shape: (-1, 1)\r\n",
      "        name: add:0\r\n",
      "  Method name is: tensorflow/serving/regress\r\n",
      "\r\n",
      "MetaGraphDef with tag-set: 'train' contains the following SignatureDefs:\r\n",
      "\r\n",
      "signature_def['train']:\r\n",
      "  The given SavedModel SignatureDef contains the following input(s):\r\n",
      "    inputs['genres'] tensor_info:\r\n",
      "        dtype: DT_FLOAT\r\n",
      "        shape: (-1, 19)\r\n",
      "        name: IteratorGetNext_2:0\r\n",
      "    inputs['itemID'] tensor_info:\r\n",
      "        dtype: DT_INT64\r\n",
      "        shape: (-1)\r\n",
      "        name: IteratorGetNext_1:0\r\n",
      "    inputs['label'] tensor_info:\r\n",
      "        dtype: DT_DOUBLE\r\n",
      "        shape: (-1)\r\n",
      "        name: IteratorGetNext_3:0\r\n",
      "    inputs['userID'] tensor_info:\r\n",
      "        dtype: DT_INT64\r\n",
      "        shape: (-1)\r\n",
      "        name: IteratorGetNext:0\r\n",
      "  The given SavedModel SignatureDef contains the following output(s):\r\n",
      "    outputs['loss'] tensor_info:\r\n",
      "        dtype: DT_FLOAT\r\n",
      "        shape: ()\r\n",
      "        name: head/weighted_loss/Sum:0\r\n",
      "    outputs['predictions'] tensor_info:\r\n",
      "        dtype: DT_FLOAT\r\n",
      "        shape: (-1, 1)\r\n",
      "        name: add:0\r\n",
      "  Method name is: tensorflow/supervised/training\r\n"
     ]
    }
   ],
   "source": [
    "# Inspect the exported SavedModel: list its tag-sets and SignatureDefs\n",
    "!saved_model_cli show --dir {exported_path} --all"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "outputs": [],
   "source": [],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  }
 ],
 "metadata": {
  "interpreter": {
   "hash": "3a9a0c422ff9f08d62211b9648017c63b0a26d2c935edc37ebb8453675d13bb5"
  },
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.11"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}