{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import os\n",
    "import torch\n",
    "import torch.autograd as autograd\n",
    "import torch.nn as nn\n",
    "import torch.nn.functional as F\n",
    "import torch.optim as optim\n",
    "import torch.utils.data as Data\n",
    "torch.manual_seed(8)\n",
    "\n",
    "import time\n",
    "import numpy as np\n",
    "import gc\n",
    "import sys\n",
    "sys.setrecursionlimit(50000)\n",
    "import pickle\n",
    "torch.backends.cudnn.benchmark = True\n",
    "torch.set_default_tensor_type('torch.cuda.FloatTensor')\n",
    "# from tensorboardX import SummaryWriter\n",
    "torch.nn.Module.dump_patches = True\n",
    "import copy\n",
    "import pandas as pd\n",
    "#then import my own modules\n",
    "from AttentiveFP import Fingerprint, Fingerprint_viz, save_smiles_dicts, get_smiles_dicts, get_smiles_array, moltosvg_highlight"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "from rdkit import Chem\n",
    "# from rdkit.Chem import AllChem\n",
    "from rdkit.Chem import QED\n",
    "from rdkit.Chem import rdMolDescriptors, MolSurf\n",
    "from rdkit.Chem.Draw import SimilarityMaps\n",
    "from rdkit import Chem\n",
    "from rdkit.Chem import AllChem\n",
    "from rdkit.Chem import rdDepictor\n",
    "from rdkit.Chem.Draw import rdMolDraw2D\n",
    "%matplotlib inline\n",
    "from numpy.polynomial.polynomial import polyfit\n",
    "import matplotlib.pyplot as plt\n",
    "import matplotlib.cm as cm\n",
    "import matplotlib\n",
    "import seaborn as sns; sns.set()\n",
    "from IPython.display import SVG, display\n",
    "import sascorer"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "number of all smiles:  9999\n",
      "number of successfully processed smiles:  9999\n"
     ]
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAU8AAAC/CAYAAAB+KF5fAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAE+pJREFUeJzt3XtQVOX/B/A3VwHZJVAyFRUXpCARJdRSUcGKvMxETEw6ijA5kBOumLcZR8qybxfHS6MyK2AOFDpajU6lIlqogGb6h+3U5N1NQjQzBXZXFkE4vz8aT24L7vIEe1Z+79df7HOe8/B5DvL23HGTJEkCERF1irvSBRARPYoYnkREAhieREQCGJ5ERAIYnkREAhieREQCPJUuoKvdvGlSugSHBQb6oa6uUeky/rOeMI+eMAeA8+hqwcGqDpdxz1NBnp4eSpfQJXrCPHrCHADOw5kYnkREAhieREQCGJ5ERAIYnkREAhieREQCetytSj3BUX2t3T6TRw50QiVE1BHueRIRCWB4EhEJYHgSEQlgeBIRCWB4EhEJYHgSEQlgeBIRCWB4EhEJYHgSEQlgeBIRCWB4EhEJYHgSEQlgeBIRCWB4EhEJYHgSEQlgeBIRCWB4EhEJYHgSEQlgeBIRCWB4EhEJYHgSEQlgeBIRCWB4EhEJYHgSEQlgeBIRCWB4EhEJYHgSEQlgeBIRCWB4EhEJYHgSEQlgeBIRCWB4EhEJYHgSEQlgeBIRCWB4EhEJYHgSEQmwG54HDhzA/PnzER8fj1GjRiElJQX79u2z6ffll1/ixRdfRHR0NFJSUnDixAmbPjdu3EB2djZGjRqFsWPHYvXq1bBYLEJjEREpyW54FhcXo3fv3lixYgV0Oh3Gjh2LJUuWoKSkRO6zf/9+rFq1Ci+//DK2bt2K8PBwvPHGG7hw4YLc5969e5g3bx6uXbuGTz75BCtXrkRZWRnefvttq+/nyFhERErztNdhy5YtCAoKkj8/99xz+PPPP1FUVIS0tDQAwKZNm5CcnIzs7GwAwJgxY3D27FkUFhZi3bp1AICysjJcvnwZhw4dwqBBg/7+5p6eWLx4MRYsWIDQ0FCHxyIiUprdPc8Hg/O+yMhI3L59GwBQU1ODK1euYOrUqf8M6u6OpKQkVFVVyW2VlZWIjo6WgxMAnn/+eXh5ecn9HB2LiEhpQheMfvrpJ4SFhQEADAYDAECj0Vj1CQsLQ319vRyyBoPBpo+3tzcGDx4sj+HoWERESrN72P5vJ06cQHl5OT788EMAQENDAwBArVZb9QsICJCXBwUFwWg0QqVS2YynVqthNBo7NdbDBAb6wdPTo7PTUkxwsO02Ufn7CK2nJFerR0RPmAPAeThLp8Lz6tWrWLJkCaZMmYKUlBSrZW5ublafJUmyaf93nwf7dXasjtTVNdrt4yqCg1W4edNk024yN9ldt731lNLRPB4lPWEOAOfRHXV0xOHwrK+vR2ZmJvr374+1a9fK7ff3Cv+9Z3l/b/L+XqRarYbJ1E5QmExyH0fHIuCovtZun8kjBzqhEqL/nxw652mxWDB//ny0tLSgsLAQfn5+8rL75yfvn6+8z2Aw4LHHHpMPszUajU2f5uZm1NTUyGM4OhYRkdLshue9e/eQk5ODK1euYOvWrejTp4/V8kGDBiE0NBRlZWVyW1tbG8rKyhAfHy+3TZw4Eb/88gtqa//ZYzp8+DCam5vlfo6ORUSkNLuH7e+99x4qKiqwcuVKNDQ0QK/Xy8uioqLg7e0NrVaLZcuWYeDAgYiNjcXXX3+N6upqrF+/Xu6blJSE/Px8aLVa5OTkwGQy4aOPPsKMGTPkezwBODQWEZHS7Ibn8ePHAQAffPCBzbLy8nKEhIRgxowZaGxsxNatW6HT6TBs2DAUFBQgIiJC7uvl5YVPP/0Uq1evxqJFi+Dt7Y1p06Zh+fLlVmM6MhYRkdLcpPYudz/CXOEK
naM6uqLoyMUgRzjrgpGrXBn9L3rCHADOozvq6AjfqkREJIDhSUQkgOFJRCSA4UlEJKDTz7bTf/PgxSCVv49Dj2ISkevhnicRkQCGJxGRAIYnEZEAhicRkQCGJxGRAIYnEZEAhicRkQCGJxGRAIYnEZEAhicRkQCGJxGRAIYnEZEAhicRkQCGJxGRAIYnEZEAhicRkQCGJxGRAIYnEZEAhicRkQCGJxGRAIYnEZEAhicRkQCGJxGRAP7d9i704N9kJ6KejXueREQCGJ5ERAIYnkREAhieREQCGJ5ERAIYnkREAhieREQCGJ5ERAIYnkREAhieREQCHHo8s7q6Gtu2bYNer8fFixcRFxeHkpISqz6SJKGgoAA7d+5EXV0doqOjkZubi8jISKt+ly5dwvvvvw+9Xg+VSoXU1FQsWLAAHh4enR6LHs6Rx0UnjxzohEqIeh6H9jwvXryIiooKhIaGIjQ0tN0+hYWF0Ol0yMzMRH5+Pvz8/JCRkYGbN2/KfRoaGpCRkQE3NzfodDpkZ2ejqKgImzZt6vRYRERKcig8ExMTUVFRgU2bNmHYsGE2y+/evYvCwkJkZWVhzpw5GDduHDZu3Ag3Nzds375d7rdr1y7cvXsXeXl5GD9+PGbNmoXs7GwUFxfDbDZ3aiwiIiU5FJ7u7g/vdvr0aZjNZkydOlVu8/PzQ0JCAqqqquS2yspKTJgwAf7+/nLb9OnT0dTUhFOnTnVqLCIiJXXJBSODwQAPDw+bQ/qwsDAYDAarfhqNxqrPgAED4OvrK/dzdCwiIiV1SXgajUb4+flZXfQBgICAAFgsFjQ3N8v9VCqVzfpqtRpGo7FTYxERKanLXobs5uZm0yZJks2yjvo50qejZQ8KDPSDp6fHQ/t0F5W/j1PW6UrBwbb/mSk5jpJ6whwAzsNZuiQ81Wo17ty5g9bWVqs9RqPRCF9fX3h5ecn9TCaTzfpms1neI3V0rI7U1TV2xZSEmMxNneqv8vfp9Dpd7eZN259HZwUHq7pkHCX1hDkAnEd31NGRLjls12g0aG1tRXV1tVX7v89xajQam/OW169fR2Njo9zP0bGIiJTUJeEZGxsLf39/lJWVyW0WiwVHjhxBfHy83DZx4kQcO3ZMvi0JAEpLS+Hj44MxY8Z0aiwiIiU5dNhusVhQUVEBALhx4wbMZrMcbpMmTYKvry+ysrKg0+kQEBAAjUaDoqIitLW1IS0tTR5n5syZKCkpgVarRWZmJmpqapCXl4eMjAz59qVevXo5NBYRkZIcCs9bt24hJyfHqu3+5/LycoSEhCArKwttbW0oKChAfX09hg8fjqKiIvTt21deJyAgAMXFxVi9ejXmz58PtVqN9PR0aLVaq7EdGYuISElu0v3L2D2EkieZO/unh13hglFXPNvuKif3/4ueMAeA8+iOOjrCtyoREQlgeBIRCWB4EhEJYHgSEQlgeBIRCWB4EhEJYHgSEQlgeBIRCWB4EhEJYHgSEQlgeBIRCWB4EhEJYHgSEQlgeBIRCWB4EhEJYHgSEQlgeBIRCWB4EhEJYHgSEQlgeBIRCWB4EhEJYHgSEQlgeBIRCWB4EhEJYHgSEQlgeBIRCWB4EhEJ8FS6gEfFUX2t0iV0C0fmNXnkQCdUQvRo4Z4nEZEAhicRkQCGJxGRAIYnEZEAhicRkQCGJxGRAIYnEZEA3udJdtm7F1Tl74Nnwvs4qRoi18A9TyIiAQxPIiIBDE8iIgEMTyIiAS4bnpcuXUJ6ejpiYmIwYcIEbNy4Ea2trUqXRUQEwEWvtjc0NCAjIwPh4eHQ6XT4/fffsWbNGrS1teGtt95SujwiItcMz127duHu3bvIy8uDv78/xo8fD7PZjLy8PGRmZsLf379Lv19Pfd0cEXUflwzPyspKTJgwwSokp0+fjnXr1uHUqVNITExUsDpqj73/gPhOUOppXPKcp8FggEajsWobMGAAfH19YTAYFKqKiOgfLrnnaTQaoVKpbNrVajWMRqMCFdF/
xTfWU0/jkuEJAG5ubjZtkiS12/6g4GDb0LUn9YWnOr0O9Twi/3ZcEefhHC552K5Wq2EymWzazWZzu3ukRETO5pLhqdFobM5tXr9+HY2NjTbnQomIlOCS4Tlx4kQcO3YMZrNZbistLYWPjw/GjBmjYGVERH9zyfCcOXMmvL29odVq8cMPP+CLL75AXl4eMjIyuvweTyIiEW6SJElKF9GeS5cuYfXq1dDr9VCr1Xj11Veh1Wrh4eGhdGlERK4bnkRErsxlb1V61B04cADffPMNfv31V5jNZgwdOhSvv/46ZsyYIfdJS0vDqVOnbNb9+eef0atXL2eW26E9e/ZgxYoVNu3vvvsuZs2aBeDvW8gKCgqwc+dO1NXVITo6Grm5uYiMjHR2uR3qaFsDfz8OPGrUKCQmJqK21vp+1L59++L48ePOKLFd1dXV2LZtG/R6PS5evIi4uDiUlJRY9XF0+1+6dAnvv/8+9Ho9VCoVUlNTsWDBgm4/mrM3hz///BNFRUU4fvw4ampqoFar8eyzz2Lx4sXo16+f3O/kyZOYO3euzfiZmZlYunRpt86hPQzPblJcXIyQkBCsWLECgYGBqKysxJIlS1BXV4e0tDS539ixY7F48WKrdb29vZ1drl2fffYZfHx85M+DBg2Svy4sLIROp8Py5cuh0WhQVFSEjIwM7Nu3D8HBwUqUa2PVqlVWFyABYNOmTThz5gyio6PlthkzZlj9fLy8vJxWY3suXryIiooKxMTEoKWlpd0+jmx/JV+2Y28Ov/76K77//nukpqZixIgRuHXrFjZv3oxZs2Zh79696N27t1X/devWWf37ezBgnUqibnHr1i2btsWLF0sJCQny5zlz5khardaZZXXa7t27pYiICMlsNre7vKmpSYqNjZU2b94st925c0caO3astGHDBmeV2Wl3796VRo8eLb3zzjtyW0JCgvTxxx8rWJWt1tZW+WutVivNmTPHarmj2z8/P1+Ki4uTTCaT3FZYWCiNGDHCqq072JtDQ0OD1NLSYtVmMBikiIgIac+ePXLbjz/+KEVEREjnz5/v1nod5ZJX23uCoKAgm7bIyEjcvn1bgWq6z+nTp2E2mzF16lS5zc/PDwkJCaiqqlKwsoerqqpCQ0OD1WkUV+Tu/vBfUUe3f0cv22lqaurwdEZXsTcHtVoNT0/rg+ChQ4fC19cXt27d6s7S/hOGpxP99NNPCAsLs2o7duwYYmJiEBMTg3nz5uHcuXMKVfdwL7zwAqKiopCUlIRdu3bJ7QaDAR4eHggNDbXqHxYW5tIvcSktLUW/fv0QFxdn1b57924MHz4czzzzDBYuXGhzDtTVOLr9H7WX7Zw7dw4WiwXh4eE2y9LT0xEZGYnExETodDrFXpLOc55OcuLECZSXl+PDDz+U20aPHo3k5GQMGTIEtbW1yM/Px+zZs/HNN98gJCREwWr/ERwcjJycHIwYMQKtra3Yv38/Vq1ahaamJmRkZMBoNMLPz8/mokNAQAAsFguam5td7hyuxWLB4cOH8dprr1m9KyExMREjR47EE088gcuXLyMvLw+zZ8/G3r17XfaxYEe3/6P0sp22tjZ88MEHCA0Nxfjx4+V2lUqFrKwsxMXFwcvLC0eOHMHmzZtx+/Zt5ObmOr1OhqcTXL16FUuWLMGUKVOQkpIity9cuFD+Oi4uDuPGjcPUqVPx2WefYeXKlUqUaiM+Ph7x8fHy50mTJqG5uRlbtmyRr3x29BKXjpYp7ciRI2hsbMT06dOt2h/8BYyLi8OoUaOQnJyM3bt3IyMjw8lVOs7R7S/6sh1nW79+PfR6PbZv3251wS4qKgpRUVHy53HjxsHb2xvFxcV488032z1V1p142N7N6uvrkZmZif79+2Pt2rUP7RscHIzY2FicOXPGSdWJSUpKQn19PWpra6FWq3Hnzh2bQyej0QhfX1/Fr1a3Z//+/RgyZIjVVfb2REREYOjQoS7983B0+z8qL9vZsWMHtm3bhjVr1iAm
JsZu/6SkJNy7dw/nz593QnXWGJ7dyGKxYP78+WhpaUFhYSH8/PwcWs/V9gQeRqPRoLW1FdXV1Vbt7Z1jcwUmkwmVlZU2e50P48o/D0e3/6Pwsp2DBw/if//7H5YtW4Zp06Z1al0lfkYMz25y79495OTk4MqVK9i6dSv69Oljd52//voLp0+fxtNPP+2ECsUdOnQIgYGBGDhwIGJjY+Hv74+ysjJ5ucViwZEjR6wO913Fd999h+bmZoeusl+4cAG//fabS/88HN3+rv6ynZMnT2Lp0qWYPXs25s2b5/B6hw4dgqenJ5588slurK59POfZTd577z1UVFRg5cqVaGhogF6vl5dFRUXBYDBgw4YNeOmllzBgwABcv34dBQUFcHd3R3p6uoKVW9NqtYiOjsaTTz6JtrY2lJaWorS0FLm5uXB3d0evXr2QlZUFnU6HgIAA+SbttrY2q5vNXcX+/fvx1FNP2dz1cPToUXz77beYPHkyHn/8cRgMBmzZsgX9+/e3Ok/tbBaLBRUVFQCAGzduwGw2y0E5adIk+Pr6OrT9Z86ciZKSEmi1WmRmZqKmpsZpL9uxN4dr164hOzsbGo0G06ZNs/pdCQoKwuDBgwH8/aBDUFAQoqOj4eXlhYqKCuzYsQPp6ekIDAzs1jm0h8+2d5P2HvW7r7y8HF5eXsjNzcXZs2dRX1+P3r17Y8yYMVi0aJHNL7aSNmzYgIMHD+KPP/6AJEkIDw/H3LlzkZycLPeRJAn5+fnYuXMn6uvrMXz4cOTm5lqd3HcFt2/fRnx8PHJycpCVlWW17Ny5c/joo49w/vx5mEwmPPbYY4iPj8dbb72l3BMs+Pti45QpU9pdVl5ejpCQEIe3v1Iv27E3h1OnTrX7CDAAvPLKK/j4448BAJ9//jm++uorXL16FS0tLRgyZAhSU1Mxd+5cu/eSdgeGJxGRAJ7zJCISwPAkIhLA8CQiEsDwJCISwPAkIhLA8CQiEsDwJCISwPAkIhLA8CQiEvB/1IY/p88OPM0AAAAASUVORK5CYII=\n",
      "text/plain": [
       "<Figure size 360x216 with 1 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "# Dataset configuration: single-task regression on log-scale EC50 values.\n",
    "task_name = 'Malaria Bioactivity'\n",
    "tasks = ['Loge EC50']\n",
    "\n",
    "raw_filename = \"../data/malaria-processed.csv\"\n",
    "feature_filename = raw_filename.replace('.csv','.pickle')  # cached atom/bond feature dicts\n",
    "filename = raw_filename.replace('.csv','')\n",
    "prefix_filename = raw_filename.split('/')[-1].replace('.csv','')\n",
    "smiles_tasks_df = pd.read_csv(raw_filename, names = [\"Loge EC50\", \"smiles\"])\n",
    "smilesList = smiles_tasks_df.smiles.values\n",
    "print(\"number of all smiles: \",len(smilesList))\n",
    "atom_num_dist = []          # atom count per parsable molecule (histogram input)\n",
    "remained_smiles = []        # raw SMILES strings that RDKit could parse\n",
    "canonical_smiles_list = []  # canonical isomeric SMILES, aligned with remained_smiles\n",
    "for smiles in smilesList:\n",
    "    # Check the parse result explicitly: MolFromSmiles returns None for\n",
    "    # invalid SMILES, and the previous bare `except` would also have\n",
    "    # swallowed unrelated errors.  Parse once and reuse `mol` instead of\n",
    "    # calling MolFromSmiles a second time for canonicalization.\n",
    "    mol = Chem.MolFromSmiles(smiles)\n",
    "    if mol is None:\n",
    "        print(smiles)\n",
    "        continue\n",
    "    atom_num_dist.append(len(mol.GetAtoms()))\n",
    "    remained_smiles.append(smiles)\n",
    "    canonical_smiles_list.append(Chem.MolToSmiles(mol, isomericSmiles=True))\n",
    "print(\"number of successfully processed smiles: \", len(remained_smiles))\n",
    "# Keep only parsable molecules; canonical_smiles_list stays index-aligned\n",
    "# with the surviving rows as long as the raw SMILES column has no duplicates.\n",
    "smiles_tasks_df = smiles_tasks_df[smiles_tasks_df[\"smiles\"].isin(remained_smiles)]\n",
    "smiles_tasks_df['cano_smiles'] = canonical_smiles_list\n",
    "\n",
    "plt.figure(figsize=(5, 3))\n",
    "sns.set(font_scale=1.5)\n",
    "ax = sns.distplot(atom_num_dist, bins=28, kde=False)\n",
    "plt.tight_layout()\n",
    "# plt.savefig(\"atom_num_dist_\"+prefix_filename+\".png\",dpi=200)\n",
    "plt.show()\n",
    "plt.close()\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Run configuration / hyperparameters.\n",
    "random_seed = 68\n",
    "start_time = str(time.ctime()).replace(':','-').replace(' ','_')  # timestamp embedded in checkpoint filenames\n",
    "\n",
    "batch_size = 200\n",
    "epochs = 800\n",
    "\n",
    "p_dropout= 0.03\n",
    "fingerprint_dim = 200  # width of the learned fingerprint (passed to Fingerprint below)\n",
    "\n",
    "weight_decay = 4.3 # also known as l2_regularization_lambda; applied as 10**-weight_decay when building the optimizer\n",
    "learning_rate = 4  # exponent: the Adam optimizer is built with lr = 10**-learning_rate\n",
    "radius = 2  # passed to Fingerprint -- presumably the message-passing radius; confirm in AttentiveFP\n",
    "T = 1  # passed to Fingerprint -- presumably the number of attention timesteps; confirm in AttentiveFP\n",
    "per_task_output_units_num = 1 # for regression model\n",
    "output_units_num = len(tasks) * per_task_output_units_num"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>Loge EC50</th>\n",
       "      <th>smiles</th>\n",
       "      <th>cano_smiles</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "Empty DataFrame\n",
       "Columns: [Loge EC50, smiles, cano_smiles]\n",
       "Index: []"
      ]
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Load the cached featurization if present; otherwise build and cache it.\n",
    "if os.path.isfile(feature_filename):\n",
    "    # Context manager closes the file handle (the previous bare open() leaked it).\n",
    "    # NOTE: pickle.load can execute arbitrary code on a malicious file --\n",
    "    # only load caches produced by this notebook or a trusted source.\n",
    "    with open(feature_filename, \"rb\") as f:\n",
    "        feature_dicts = pickle.load(f)\n",
    "else:\n",
    "    feature_dicts = save_smiles_dicts(smilesList,filename)\n",
    "# feature_dicts = get_smiles_dicts(smilesList)\n",
    "# Rows whose canonical SMILES lack precomputed features are set aside.\n",
    "remained_df = smiles_tasks_df[smiles_tasks_df[\"cano_smiles\"].isin(feature_dicts['smiles_to_atom_mask'].keys())]\n",
    "uncovered_df = smiles_tasks_df.drop(remained_df.index)\n",
    "uncovered_df"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Hold out a fixed, seeded 20% of the featurized molecules for testing;\n",
    "# the remainder becomes the training set.  Both splits get fresh indices.\n",
    "test_df = remained_df.sample(frac=0.2,random_state=random_seed)\n",
    "train_df = remained_df.drop(test_df.index).reset_index(drop=True)\n",
    "test_df = test_df.reset_index(drop=True)\n",
    "# print(len(test_df),sorted(test_df.cano_smiles.values))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "863604\n",
      "atom_fc.weight torch.Size([200, 39])\n",
      "atom_fc.bias torch.Size([200])\n",
      "neighbor_fc.weight torch.Size([200, 49])\n",
      "neighbor_fc.bias torch.Size([200])\n",
      "GRUCell.0.weight_ih torch.Size([600, 200])\n",
      "GRUCell.0.weight_hh torch.Size([600, 200])\n",
      "GRUCell.0.bias_ih torch.Size([600])\n",
      "GRUCell.0.bias_hh torch.Size([600])\n",
      "GRUCell.1.weight_ih torch.Size([600, 200])\n",
      "GRUCell.1.weight_hh torch.Size([600, 200])\n",
      "GRUCell.1.bias_ih torch.Size([600])\n",
      "GRUCell.1.bias_hh torch.Size([600])\n",
      "align.0.weight torch.Size([1, 400])\n",
      "align.0.bias torch.Size([1])\n",
      "align.1.weight torch.Size([1, 400])\n",
      "align.1.bias torch.Size([1])\n",
      "attend.0.weight torch.Size([200, 200])\n",
      "attend.0.bias torch.Size([200])\n",
      "attend.1.weight torch.Size([200, 200])\n",
      "attend.1.bias torch.Size([200])\n",
      "mol_GRUCell.weight_ih torch.Size([600, 200])\n",
      "mol_GRUCell.weight_hh torch.Size([600, 200])\n",
      "mol_GRUCell.bias_ih torch.Size([600])\n",
      "mol_GRUCell.bias_hh torch.Size([600])\n",
      "mol_align.weight torch.Size([1, 400])\n",
      "mol_align.bias torch.Size([1])\n",
      "mol_attend.weight torch.Size([200, 200])\n",
      "mol_attend.bias torch.Size([200])\n",
      "output.weight torch.Size([1, 200])\n",
      "output.bias torch.Size([1])\n"
     ]
    }
   ],
   "source": [
    "# Infer feature sizes from a single featurized molecule, then build the\n",
    "# AttentiveFP model and its optimizer.\n",
    "x_atom, x_bonds, x_atom_index, x_bond_index, x_mask, smiles_to_rdkit_list = get_smiles_array([canonical_smiles_list[0]],feature_dicts)\n",
    "num_atom_features = x_atom.shape[-1]\n",
    "num_bond_features = x_bonds.shape[-1]\n",
    "loss_function = nn.MSELoss()\n",
    "model = Fingerprint(radius, T, num_atom_features, num_bond_features,\n",
    "            fingerprint_dim, output_units_num, p_dropout)\n",
    "model.cuda()\n",
    "\n",
    "# optimizer = optim.Adam(model.parameters(), learning_rate, weight_decay=weight_decay)\n",
    "# learning_rate and weight_decay are exponents, hence the 10**- transform.\n",
    "optimizer = optim.Adam(model.parameters(), 10**-learning_rate, weight_decay=10**-weight_decay)\n",
    "# optimizer = optim.SGD(model.parameters(), 10**-learning_rate, weight_decay=10**-weight_decay)\n",
    "\n",
    "# tensorboard = SummaryWriter(log_dir=\"runs/\"+start_time+\"_\"+prefix_filename+\"_\"+str(fingerprint_dim)+\"_\"+str(p_dropout))\n",
    "\n",
    "# Report the trainable-parameter count and the shape of each parameter.\n",
    "model_parameters = filter(lambda p: p.requires_grad, model.parameters())\n",
    "params = sum([np.prod(p.size()) for p in model_parameters])\n",
    "print(params)\n",
    "for name, param in model.named_parameters():\n",
    "    if param.requires_grad:\n",
    "        print(name, param.data.shape)\n",
    "        "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "def train(model, dataset, optimizer, loss_function):\n",
    "    \"\"\"Run one epoch of mini-batch training over `dataset` (a DataFrame).\n",
    "\n",
    "    Uses the notebook globals `epoch`, `batch_size`, `tasks` and\n",
    "    `feature_dicts`.  Row order is shuffled with a seed derived from the\n",
    "    current global `epoch`, so each epoch sees a different but\n",
    "    reproducible ordering.\n",
    "    \"\"\"\n",
    "    model.train()\n",
    "    np.random.seed(epoch)  # `epoch` is the counter of the training loop below\n",
    "    valList = np.arange(0, dataset.shape[0])\n",
    "    # shuffle the row indices, then slice them into fixed-size batches\n",
    "    np.random.shuffle(valList)\n",
    "    batch_list = []\n",
    "    for i in range(0, dataset.shape[0], batch_size):\n",
    "        batch_list.append(valList[i:i+batch_size])\n",
    "    for counter, batch in enumerate(batch_list):\n",
    "        batch_df = dataset.loc[batch, :]\n",
    "        smiles_list = batch_df.cano_smiles.values\n",
    "        y_val = batch_df[tasks[0]].values\n",
    "\n",
    "        x_atom, x_bonds, x_atom_index, x_bond_index, x_mask, smiles_to_rdkit_list = get_smiles_array(smiles_list,feature_dicts)\n",
    "        atoms_prediction, mol_prediction = model(torch.Tensor(x_atom),torch.Tensor(x_bonds),torch.cuda.LongTensor(x_atom_index),torch.cuda.LongTensor(x_bond_index),torch.Tensor(x_mask))\n",
    "\n",
    "        optimizer.zero_grad()\n",
    "        loss = loss_function(mol_prediction, torch.Tensor(y_val).view(-1,1))\n",
    "        loss.backward()\n",
    "        optimizer.step()\n",
    "def eval(model, dataset):\n",
    "    \"\"\"Return (mean MAE, mean MSE) of `model` over `dataset`.\n",
    "\n",
    "    NOTE(review): shadows the builtin `eval`; the name is kept because the\n",
    "    rest of the notebook calls it.\n",
    "    \"\"\"\n",
    "    model.eval()\n",
    "    test_MAE_list = []\n",
    "    test_MSE_list = []\n",
    "    valList = np.arange(0, dataset.shape[0])\n",
    "    batch_list = []\n",
    "    for i in range(0, dataset.shape[0], batch_size):\n",
    "        batch_list.append(valList[i:i+batch_size])\n",
    "    # Evaluation needs no autograd graph; no_grad saves memory and time\n",
    "    # without changing the computed predictions.\n",
    "    with torch.no_grad():\n",
    "        for counter, batch in enumerate(batch_list):\n",
    "            batch_df = dataset.loc[batch, :]\n",
    "            smiles_list = batch_df.cano_smiles.values\n",
    "            y_val = batch_df[tasks[0]].values\n",
    "\n",
    "            x_atom, x_bonds, x_atom_index, x_bond_index, x_mask, smiles_to_rdkit_list = get_smiles_array(smiles_list,feature_dicts)\n",
    "            atoms_prediction, mol_prediction = model(torch.Tensor(x_atom),torch.Tensor(x_bonds),torch.cuda.LongTensor(x_atom_index),torch.cuda.LongTensor(x_bond_index),torch.Tensor(x_mask))\n",
    "            MAE = F.l1_loss(mol_prediction, torch.Tensor(y_val).view(-1,1), reduction='none')\n",
    "            MSE = F.mse_loss(mol_prediction, torch.Tensor(y_val).view(-1,1), reduction='none')\n",
    "\n",
    "            # view(-1) instead of squeeze(): squeeze() on a single-row batch\n",
    "            # produces a 0-d array that list.extend() cannot iterate.\n",
    "            test_MAE_list.extend(MAE.data.view(-1).cpu().numpy())\n",
    "            test_MSE_list.extend(MSE.data.view(-1).cpu().numpy())\n",
    "    return np.array(test_MAE_list).mean(), np.array(test_MSE_list).mean()\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0 1.5478796 1.628472\n",
      "1 1.448478 1.5302224\n",
      "2 1.4498698 1.5333989\n",
      "3 1.425018 1.505234\n",
      "4 1.441523 1.5231948\n",
      "5 1.4017888 1.4746803\n",
      "6 1.3749068 1.4464937\n",
      "7 1.3531059 1.4220781\n",
      "8 1.3480316 1.4143926\n",
      "9 1.3431658 1.3971175\n",
      "10 1.2978153 1.3485527\n",
      "11 1.2780799 1.3257537\n",
      "12 1.2728355 1.3112742\n",
      "13 1.3038107 1.335188\n",
      "14 1.2483072 1.2825298\n",
      "15 1.2388601 1.2719156\n",
      "16 1.2517695 1.2779336\n",
      "17 1.2320278 1.258369\n",
      "18 1.2472036 1.2736138\n",
      "19 1.2221298 1.2528942\n",
      "20 1.2216604 1.2494508\n",
      "21 1.2252085 1.2557986\n",
      "22 1.2185632 1.2483377\n",
      "23 1.2103592 1.2352843\n",
      "24 1.2274623 1.2619636\n",
      "25 1.2248856 1.2448567\n",
      "26 1.2058675 1.2268171\n",
      "27 1.1973008 1.2225409\n",
      "28 1.2388365 1.2538594\n",
      "29 1.1966963 1.2268447\n",
      "30 1.1990334 1.2282244\n",
      "31 1.2003697 1.2185816\n",
      "32 1.1839876 1.2106801\n",
      "33 1.1827517 1.2043097\n",
      "34 1.1821854 1.2080361\n",
      "35 1.192607 1.2253516\n",
      "36 1.1787097 1.2061633\n",
      "37 1.1723497 1.1951882\n",
      "38 1.1703535 1.1923693\n",
      "39 1.1852618 1.2026875\n",
      "40 1.1674594 1.1937082\n",
      "41 1.1659837 1.1897207\n",
      "42 1.1686869 1.1905769\n",
      "43 1.1777316 1.2105943\n",
      "44 1.1562651 1.1826354\n",
      "45 1.1648812 1.1846714\n",
      "46 1.154026 1.1791822\n",
      "47 1.1655822 1.196853\n",
      "48 1.1531729 1.1815648\n",
      "49 1.1565808 1.178542\n",
      "50 1.1544813 1.1750175\n",
      "51 1.140072 1.1675918\n",
      "52 1.1456698 1.178533\n",
      "53 1.1333617 1.1597456\n",
      "54 1.1354048 1.1638831\n",
      "55 1.164166 1.1841472\n",
      "56 1.1448212 1.1782615\n",
      "57 1.1332649 1.1627176\n",
      "58 1.127703 1.1599286\n",
      "59 1.1221596 1.1548471\n",
      "60 1.12169 1.1535094\n",
      "61 1.1224706 1.1555935\n",
      "62 1.1189086 1.1538434\n",
      "63 1.1206045 1.1604556\n",
      "64 1.1156883 1.1504178\n",
      "65 1.1281708 1.1710228\n",
      "66 1.1141084 1.1523719\n",
      "67 1.1352739 1.1582422\n",
      "68 1.1289911 1.1701506\n",
      "69 1.1031383 1.1397016\n",
      "70 1.1080263 1.1506523\n",
      "71 1.101588 1.1464661\n",
      "72 1.1037246 1.1360483\n",
      "73 1.1068801 1.1555248\n",
      "74 1.0996981 1.1369984\n",
      "75 1.0901781 1.1297038\n",
      "76 1.0930307 1.141583\n",
      "77 1.0925688 1.1364609\n",
      "78 1.0862478 1.1234071\n",
      "79 1.0829743 1.1236157\n",
      "80 1.0779526 1.1214095\n",
      "81 1.0828499 1.1204759\n",
      "82 1.1069263 1.1646699\n",
      "83 1.0796647 1.1195198\n",
      "84 1.1158919 1.1517202\n",
      "85 1.0740962 1.126662\n",
      "86 1.066531 1.1132535\n",
      "87 1.0651407 1.1173816\n",
      "88 1.0702162 1.1199963\n",
      "89 1.0604169 1.1122426\n",
      "90 1.0596182 1.1123289\n",
      "91 1.0729758 1.1181995\n",
      "92 1.0657495 1.1137776\n",
      "93 1.0815995 1.1262543\n",
      "94 1.0681939 1.1368322\n",
      "95 1.0540236 1.1088722\n",
      "96 1.0671438 1.1190476\n",
      "97 1.0504636 1.1194018\n",
      "98 1.0491142 1.1039102\n",
      "99 1.0535622 1.1070195\n",
      "100 1.0514883 1.12581\n",
      "101 1.0407002 1.1057312\n",
      "102 1.0495203 1.1112329\n",
      "103 1.0507386 1.10453\n",
      "104 1.045134 1.1150136\n",
      "105 1.075234 1.1316034\n",
      "106 1.0322795 1.1002258\n",
      "107 1.0299097 1.0963951\n",
      "108 1.0251101 1.1000998\n",
      "109 1.0269638 1.1022891\n",
      "110 1.0248313 1.1036345\n",
      "111 1.0261763 1.105335\n",
      "112 1.0527643 1.1144909\n",
      "113 1.0702273 1.1285415\n",
      "114 1.0290071 1.1007965\n",
      "115 1.0278382 1.0964351\n",
      "116 1.0218133 1.0933454\n",
      "117 1.0171787 1.1098249\n",
      "118 1.016715 1.1080462\n",
      "119 1.0087729 1.090412\n",
      "120 1.0153209 1.0904987\n",
      "121 1.0084295 1.0854156\n",
      "122 1.0417293 1.1106514\n",
      "123 1.0106094 1.109998\n",
      "124 1.0075526 1.1051438\n",
      "125 0.99493986 1.0864431\n",
      "126 1.0209585 1.1195904\n",
      "127 1.0206367 1.1058037\n",
      "128 1.0401722 1.1169345\n",
      "129 0.99849844 1.1042134\n",
      "130 0.9996711 1.0931513\n",
      "131 0.9881659 1.087543\n",
      "132 0.9924026 1.0863233\n",
      "133 0.9966203 1.0807244\n",
      "134 1.0024333 1.0856594\n",
      "135 0.98214436 1.0768853\n",
      "136 0.97878313 1.0825303\n",
      "137 0.97986424 1.0865455\n",
      "138 0.9947611 1.0906919\n",
      "139 1.0046384 1.1250268\n",
      "140 0.98354554 1.0909718\n",
      "141 0.9750546 1.0933058\n",
      "142 1.0001419 1.1294862\n",
      "143 0.98097026 1.0785336\n",
      "144 0.98072904 1.0902058\n",
      "145 0.9841303 1.0924876\n",
      "146 0.97226673 1.0922194\n",
      "147 1.0001239 1.1179134\n",
      "148 0.97952026 1.083761\n",
      "149 0.9741321 1.0737098\n",
      "150 0.9803714 1.1163975\n",
      "151 0.9659421 1.0717795\n",
      "152 0.9644079 1.0751549\n",
      "153 0.95689017 1.0708467\n",
      "154 0.9593581 1.0665402\n",
      "155 0.9631806 1.0723978\n",
      "156 1.0000592 1.1025144\n",
      "157 0.96277064 1.0715212\n",
      "158 0.9656637 1.0729246\n",
      "159 0.95358014 1.0911589\n",
      "160 0.985153 1.0854833\n",
      "161 0.9466197 1.0760896\n",
      "162 0.94896334 1.0705599\n",
      "163 0.9483722 1.0657388\n",
      "164 0.9669053 1.1121676\n",
      "165 0.9471728 1.0699009\n",
      "166 0.94368273 1.0687855\n",
      "167 0.95626557 1.1020058\n",
      "168 0.94042385 1.0833832\n",
      "169 0.9371707 1.0849195\n",
      "170 0.93306607 1.0754732\n",
      "171 0.93269265 1.0799791\n",
      "172 0.9692426 1.0875376\n",
      "173 0.9305774 1.0756998\n",
      "174 0.937176 1.0755522\n",
      "175 0.93309265 1.0671539\n",
      "176 0.9390211 1.0749211\n",
      "177 0.93968827 1.0733086\n",
      "178 0.9334923 1.0811872\n",
      "179 0.9251805 1.0630808\n",
      "180 0.91648084 1.0657479\n",
      "181 0.9308954 1.0883684\n",
      "182 0.9177866 1.0728799\n",
      "183 0.917589 1.0884405\n",
      "184 0.9171411 1.0796255\n",
      "185 0.9208634 1.0580701\n",
      "186 0.93198663 1.0620881\n",
      "187 0.9155113 1.0762402\n",
      "188 0.91472447 1.0656248\n",
      "189 0.9119055 1.0613525\n",
      "190 0.9012815 1.063597\n",
      "191 0.9027621 1.0593306\n",
      "192 0.9300988 1.1077\n",
      "193 0.89699346 1.0603526\n",
      "194 0.92488444 1.0631211\n",
      "195 0.9032333 1.0613841\n",
      "196 0.90182465 1.0695539\n",
      "197 0.9025315 1.0919254\n",
      "198 0.9042049 1.0860193\n",
      "199 0.9126359 1.083521\n",
      "200 0.9692006 1.1568716\n",
      "201 0.908373 1.0959966\n",
      "202 0.9218768 1.1189582\n",
      "203 0.89537996 1.0620989\n",
      "204 0.89987886 1.0744736\n",
      "205 0.89468896 1.0604948\n",
      "206 0.888804 1.0567634\n",
      "207 0.8840221 1.0552068\n",
      "208 0.8805379 1.058471\n",
      "209 0.8820039 1.0684204\n",
      "210 0.8862652 1.0805753\n",
      "211 0.8783945 1.0706438\n",
      "212 0.87796396 1.0617826\n",
      "213 0.87243545 1.0646216\n",
      "214 0.8761259 1.0830073\n",
      "215 0.87422526 1.0633651\n",
      "216 0.9056303 1.0777858\n",
      "217 0.8925985 1.0878736\n",
      "218 0.8784354 1.0615025\n",
      "219 0.8731537 1.0548302\n",
      "220 0.9149376 1.1416457\n",
      "221 0.8621062 1.0601137\n",
      "222 0.86777914 1.0655193\n",
      "223 0.8573421 1.0621299\n",
      "224 0.8715197 1.0763559\n",
      "225 0.91627705 1.1513237\n",
      "226 0.8752412 1.0781085\n",
      "227 0.8588178 1.0539489\n",
      "228 0.8601615 1.0728817\n",
      "229 0.8517768 1.0567849\n",
      "230 0.8765036 1.0625409\n",
      "231 0.85206234 1.0608413\n",
      "232 0.84909046 1.0719537\n",
      "233 0.90553886 1.0861733\n",
      "234 0.86771685 1.0538828\n",
      "235 0.8522023 1.0550134\n",
      "236 0.84238267 1.0512835\n",
      "237 0.8521414 1.0606356\n",
      "238 0.8634289 1.0559068\n",
      "239 0.8522082 1.0574617\n",
      "240 0.85946465 1.0443797\n",
      "241 0.84823495 1.062013\n",
      "242 0.85347533 1.0470026\n",
      "243 0.84134394 1.0623857\n",
      "244 0.8391631 1.0598451\n",
      "245 0.843294 1.0720752\n",
      "246 0.8468988 1.061403\n",
      "247 0.83788294 1.072817\n",
      "248 0.83136016 1.0539181\n",
      "249 0.8373735 1.0727422\n",
      "250 0.8245447 1.0620985\n",
      "251 0.8266534 1.0499208\n",
      "252 0.8293676 1.0547549\n",
      "253 0.87806547 1.070056\n",
      "254 0.8247368 1.0787842\n",
      "255 0.84656215 1.0745807\n",
      "256 0.82602674 1.0597908\n",
      "257 0.83549565 1.0872319\n",
      "258 0.8481979 1.0877751\n"
     ]
    }
   ],
   "source": [
    "# Training loop with best-checkpoint tracking and simple early stopping.\n",
    "best_param ={}\n",
    "best_param[\"train_epoch\"] = 0\n",
    "best_param[\"test_epoch\"] = 0\n",
    "best_param[\"train_MSE\"] = 9e8\n",
    "best_param[\"test_MSE\"] = 9e8\n",
    "\n",
    "for epoch in range(800):\n",
    "    # Evaluate before training, so epoch 0 reports the untrained model.\n",
    "    train_MAE, train_MSE = eval(model, train_df)\n",
    "    test_MAE, test_MSE = eval(model, test_df)\n",
    "#     tensorboard.add_scalars('MAE',{'train_MAE':test_MAE, 'test_MAE':test_MSE}, epoch)\n",
    "#     tensorboard.add_scalars('MSE',{'train_MSE':test_MAE, 'test_MSE':test_MSE}, epoch)\n",
    "    if train_MSE < best_param[\"train_MSE\"]:\n",
    "        best_param[\"train_epoch\"] = epoch\n",
    "        best_param[\"train_MSE\"] = train_MSE\n",
    "    if test_MSE < best_param[\"test_MSE\"]:\n",
    "        best_param[\"test_epoch\"] = epoch\n",
    "        best_param[\"test_MSE\"] = test_MSE\n",
    "        if test_MSE < 1.1:\n",
    "             # NOTE(review): a checkpoint is written only when test MSE < 1.1;\n",
    "             # if that is never reached, the next cell's torch.load will fail\n",
    "             # because no file exists for best_param[\"test_epoch\"].\n",
    "             torch.save(model, 'saved_models/model_'+prefix_filename+'_'+start_time+'_'+str(epoch)+'.pt')\n",
    "    # Stop once train MSE has been stale for >2 epochs AND test MSE for >18.\n",
    "    if (epoch - best_param[\"train_epoch\"] >2) and (epoch - best_param[\"test_epoch\"] >18):        \n",
    "        break\n",
    "    print(epoch, train_MSE, test_MSE)\n",
    "    \n",
    "    train(model, train_df, optimizer, loss_function)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "best epoch: 240 \n",
      " test MSE: 1.0443797\n"
     ]
    }
   ],
   "source": [
    "# evaluate model\n",
    "# NOTE: torch.load unpickles a full Module object; pickle can execute\n",
    "# arbitrary code, so only load checkpoints written by this notebook.\n",
    "best_model = torch.load('saved_models/model_'+prefix_filename+'_'+start_time+'_'+str(best_param[\"test_epoch\"])+'.pt')     \n",
    "\n",
    "best_model_dict = best_model.state_dict()\n",
    "best_model_wts = copy.deepcopy(best_model_dict)\n",
    "\n",
    "# Copy the best weights into the live model, then sanity-check one layer.\n",
    "model.load_state_dict(best_model_wts)\n",
    "(best_model.align[0].weight == model.align[0].weight).all()\n",
    "test_MAE, test_MSE = eval(model, test_df)\n",
    "print(\"best epoch:\",best_param[\"test_epoch\"],\"\\n\",\"test MSE:\",test_MSE)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.2"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
