{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "==> Word Index Built\n",
      "==> Sequence Padded\n",
      "Epoch 1/100 | Batch 0/77 | train_loss: 2.713 | test_loss: 2.709\n",
      "Epoch 1/100 | Batch 50/77 | train_loss: 2.273 | test_loss: 2.239\n",
      "Epoch 2/100 | Batch 0/77 | train_loss: 2.000 | test_loss: 1.958\n",
      "Epoch 2/100 | Batch 50/77 | train_loss: 1.672 | test_loss: 1.548\n",
      "Epoch 3/100 | Batch 0/77 | train_loss: 1.386 | test_loss: 1.334\n",
      "Epoch 3/100 | Batch 50/77 | train_loss: 1.295 | test_loss: 1.154\n",
      "Epoch 4/100 | Batch 0/77 | train_loss: 1.082 | test_loss: 1.045\n",
      "Epoch 4/100 | Batch 50/77 | train_loss: 0.858 | test_loss: 0.778\n",
      "Epoch 5/100 | Batch 0/77 | train_loss: 0.628 | test_loss: 0.628\n",
      "Epoch 5/100 | Batch 50/77 | train_loss: 0.501 | test_loss: 0.472\n",
      "Epoch 6/100 | Batch 0/77 | train_loss: 0.426 | test_loss: 0.408\n",
      "Epoch 6/100 | Batch 50/77 | train_loss: 0.376 | test_loss: 0.363\n",
      "Epoch 7/100 | Batch 0/77 | train_loss: 0.353 | test_loss: 0.335\n",
      "Epoch 7/100 | Batch 50/77 | train_loss: 0.310 | test_loss: 0.312\n",
      "Epoch 8/100 | Batch 0/77 | train_loss: 0.318 | test_loss: 0.294\n",
      "Epoch 8/100 | Batch 50/77 | train_loss: 0.265 | test_loss: 0.281\n",
      "Epoch 9/100 | Batch 0/77 | train_loss: 0.293 | test_loss: 0.263\n",
      "Epoch 9/100 | Batch 50/77 | train_loss: 0.235 | test_loss: 0.253\n",
      "Epoch 10/100 | Batch 0/77 | train_loss: 0.260 | test_loss: 0.232\n",
      "Epoch 10/100 | Batch 50/77 | train_loss: 0.198 | test_loss: 0.216\n",
      "Epoch 11/100 | Batch 0/77 | train_loss: 0.223 | test_loss: 0.211\n",
      "Epoch 11/100 | Batch 50/77 | train_loss: 0.169 | test_loss: 0.187\n",
      "Epoch 12/100 | Batch 0/77 | train_loss: 0.188 | test_loss: 0.183\n",
      "Epoch 12/100 | Batch 50/77 | train_loss: 0.147 | test_loss: 0.174\n",
      "Epoch 13/100 | Batch 0/77 | train_loss: 0.167 | test_loss: 0.163\n",
      "Epoch 13/100 | Batch 50/77 | train_loss: 0.131 | test_loss: 0.152\n",
      "Epoch 14/100 | Batch 0/77 | train_loss: 0.147 | test_loss: 0.159\n",
      "Epoch 14/100 | Batch 50/77 | train_loss: 0.121 | test_loss: 0.144\n",
      "Epoch 15/100 | Batch 0/77 | train_loss: 0.135 | test_loss: 0.143\n",
      "Epoch 15/100 | Batch 50/77 | train_loss: 0.111 | test_loss: 0.134\n",
      "Epoch 16/100 | Batch 0/77 | train_loss: 0.126 | test_loss: 0.140\n",
      "Epoch 16/100 | Batch 50/77 | train_loss: 0.105 | test_loss: 0.126\n",
      "Epoch 17/100 | Batch 0/77 | train_loss: 0.121 | test_loss: 0.126\n",
      "Epoch 17/100 | Batch 50/77 | train_loss: 0.098 | test_loss: 0.119\n",
      "Epoch 18/100 | Batch 0/77 | train_loss: 0.119 | test_loss: 0.122\n",
      "Epoch 18/100 | Batch 50/77 | train_loss: 0.092 | test_loss: 0.111\n",
      "Epoch 19/100 | Batch 0/77 | train_loss: 0.109 | test_loss: 0.115\n",
      "Epoch 19/100 | Batch 50/77 | train_loss: 0.087 | test_loss: 0.107\n",
      "Epoch 20/100 | Batch 0/77 | train_loss: 0.103 | test_loss: 0.109\n",
      "Epoch 20/100 | Batch 50/77 | train_loss: 0.083 | test_loss: 0.102\n",
      "Epoch 21/100 | Batch 0/77 | train_loss: 0.103 | test_loss: 0.107\n",
      "Epoch 21/100 | Batch 50/77 | train_loss: 0.078 | test_loss: 0.100\n",
      "Epoch 22/100 | Batch 0/77 | train_loss: 0.099 | test_loss: 0.102\n",
      "Epoch 22/100 | Batch 50/77 | train_loss: 0.073 | test_loss: 0.096\n",
      "Epoch 23/100 | Batch 0/77 | train_loss: 0.100 | test_loss: 0.103\n",
      "Epoch 23/100 | Batch 50/77 | train_loss: 0.070 | test_loss: 0.093\n",
      "Epoch 24/100 | Batch 0/77 | train_loss: 0.088 | test_loss: 0.097\n",
      "Epoch 24/100 | Batch 50/77 | train_loss: 0.068 | test_loss: 0.090\n",
      "Epoch 25/100 | Batch 0/77 | train_loss: 0.087 | test_loss: 0.093\n",
      "Epoch 25/100 | Batch 50/77 | train_loss: 0.067 | test_loss: 0.089\n",
      "Epoch 26/100 | Batch 0/77 | train_loss: 0.082 | test_loss: 0.094\n",
      "Epoch 26/100 | Batch 50/77 | train_loss: 0.067 | test_loss: 0.080\n",
      "Epoch 27/100 | Batch 0/77 | train_loss: 0.077 | test_loss: 0.091\n",
      "Epoch 27/100 | Batch 50/77 | train_loss: 0.062 | test_loss: 0.078\n",
      "Epoch 28/100 | Batch 0/77 | train_loss: 0.069 | test_loss: 0.083\n",
      "Epoch 28/100 | Batch 50/77 | train_loss: 0.061 | test_loss: 0.076\n",
      "Epoch 29/100 | Batch 0/77 | train_loss: 0.069 | test_loss: 0.085\n",
      "Epoch 29/100 | Batch 50/77 | train_loss: 0.059 | test_loss: 0.076\n",
      "Epoch 30/100 | Batch 0/77 | train_loss: 0.095 | test_loss: 0.091\n",
      "Epoch 30/100 | Batch 50/77 | train_loss: 0.068 | test_loss: 0.100\n",
      "Epoch 31/100 | Batch 0/77 | train_loss: 0.101 | test_loss: 0.095\n",
      "Epoch 31/100 | Batch 50/77 | train_loss: 0.053 | test_loss: 0.076\n",
      "Epoch 32/100 | Batch 0/77 | train_loss: 0.086 | test_loss: 0.112\n",
      "Epoch 32/100 | Batch 50/77 | train_loss: 0.054 | test_loss: 0.074\n",
      "Epoch 33/100 | Batch 0/77 | train_loss: 0.067 | test_loss: 0.111\n",
      "Epoch 33/100 | Batch 50/77 | train_loss: 0.052 | test_loss: 0.072\n",
      "Epoch 34/100 | Batch 0/77 | train_loss: 0.067 | test_loss: 0.085\n",
      "Epoch 34/100 | Batch 50/77 | train_loss: 0.049 | test_loss: 0.069\n",
      "Epoch 35/100 | Batch 0/77 | train_loss: 0.078 | test_loss: 0.118\n",
      "Epoch 35/100 | Batch 50/77 | train_loss: 0.054 | test_loss: 0.071\n",
      "Epoch 36/100 | Batch 0/77 | train_loss: 0.062 | test_loss: 0.083\n",
      "Epoch 36/100 | Batch 50/77 | train_loss: 0.047 | test_loss: 0.072\n",
      "Epoch 37/100 | Batch 0/77 | train_loss: 0.064 | test_loss: 0.121\n",
      "Epoch 37/100 | Batch 50/77 | train_loss: 0.052 | test_loss: 0.070\n",
      "Epoch 38/100 | Batch 0/77 | train_loss: 0.054 | test_loss: 0.082\n",
      "Epoch 38/100 | Batch 50/77 | train_loss: 0.050 | test_loss: 0.071\n",
      "Epoch 39/100 | Batch 0/77 | train_loss: 0.068 | test_loss: 0.113\n",
      "Epoch 39/100 | Batch 50/77 | train_loss: 0.043 | test_loss: 0.071\n",
      "Epoch 40/100 | Batch 0/77 | train_loss: 0.065 | test_loss: 0.114\n",
      "Epoch 40/100 | Batch 50/77 | train_loss: 0.045 | test_loss: 0.077\n",
      "Epoch 41/100 | Batch 0/77 | train_loss: 0.051 | test_loss: 0.076\n",
      "Epoch 41/100 | Batch 50/77 | train_loss: 0.040 | test_loss: 0.073\n",
      "Epoch 42/100 | Batch 0/77 | train_loss: 0.046 | test_loss: 0.069\n",
      "Epoch 42/100 | Batch 50/77 | train_loss: 0.043 | test_loss: 0.072\n",
      "Epoch 43/100 | Batch 0/77 | train_loss: 0.045 | test_loss: 0.069\n",
      "Epoch 43/100 | Batch 50/77 | train_loss: 0.041 | test_loss: 0.066\n",
      "Epoch 44/100 | Batch 0/77 | train_loss: 0.045 | test_loss: 0.066\n",
      "Epoch 44/100 | Batch 50/77 | train_loss: 0.038 | test_loss: 0.065\n",
      "Epoch 45/100 | Batch 0/77 | train_loss: 0.044 | test_loss: 0.068\n",
      "Epoch 45/100 | Batch 50/77 | train_loss: 0.039 | test_loss: 0.060\n",
      "Epoch 46/100 | Batch 0/77 | train_loss: 0.043 | test_loss: 0.066\n",
      "Epoch 46/100 | Batch 50/77 | train_loss: 0.036 | test_loss: 0.058\n",
      "Epoch 47/100 | Batch 0/77 | train_loss: 0.042 | test_loss: 0.064\n",
      "Epoch 47/100 | Batch 50/77 | train_loss: 0.036 | test_loss: 0.058\n",
      "Epoch 48/100 | Batch 0/77 | train_loss: 0.041 | test_loss: 0.059\n",
      "Epoch 48/100 | Batch 50/77 | train_loss: 0.033 | test_loss: 0.057\n",
      "Epoch 49/100 | Batch 0/77 | train_loss: 0.039 | test_loss: 0.063\n",
      "Epoch 49/100 | Batch 50/77 | train_loss: 0.034 | test_loss: 0.060\n",
      "Epoch 50/100 | Batch 0/77 | train_loss: 0.046 | test_loss: 0.068\n",
      "Epoch 50/100 | Batch 50/77 | train_loss: 0.032 | test_loss: 0.060\n",
      "Epoch 51/100 | Batch 0/77 | train_loss: 0.041 | test_loss: 0.059\n",
      "Epoch 51/100 | Batch 50/77 | train_loss: 0.031 | test_loss: 0.056\n",
      "Epoch 52/100 | Batch 0/77 | train_loss: 0.037 | test_loss: 0.060\n",
      "Epoch 52/100 | Batch 50/77 | train_loss: 0.037 | test_loss: 0.071\n",
      "Epoch 53/100 | Batch 0/77 | train_loss: 0.049 | test_loss: 0.081\n",
      "Epoch 53/100 | Batch 50/77 | train_loss: 0.048 | test_loss: 0.065\n",
      "Epoch 54/100 | Batch 0/77 | train_loss: 0.044 | test_loss: 0.092\n",
      "Epoch 54/100 | Batch 50/77 | train_loss: 0.034 | test_loss: 0.057\n",
      "Epoch 55/100 | Batch 0/77 | train_loss: 0.052 | test_loss: 0.074\n",
      "Epoch 55/100 | Batch 50/77 | train_loss: 0.030 | test_loss: 0.065\n",
      "Epoch 56/100 | Batch 0/77 | train_loss: 0.037 | test_loss: 0.075\n",
      "Epoch 56/100 | Batch 50/77 | train_loss: 0.026 | test_loss: 0.071\n",
      "Epoch 57/100 | Batch 0/77 | train_loss: 0.041 | test_loss: 0.063\n",
      "Epoch 57/100 | Batch 50/77 | train_loss: 0.033 | test_loss: 0.059\n",
      "Epoch 58/100 | Batch 0/77 | train_loss: 0.037 | test_loss: 0.070\n",
      "Epoch 58/100 | Batch 50/77 | train_loss: 0.026 | test_loss: 0.073\n",
      "Epoch 59/100 | Batch 0/77 | train_loss: 0.055 | test_loss: 0.075\n",
      "Epoch 59/100 | Batch 50/77 | train_loss: 0.030 | test_loss: 0.068\n",
      "Epoch 60/100 | Batch 0/77 | train_loss: 0.041 | test_loss: 0.058\n",
      "Epoch 60/100 | Batch 50/77 | train_loss: 0.027 | test_loss: 0.061\n",
      "Epoch 61/100 | Batch 0/77 | train_loss: 0.036 | test_loss: 0.059\n",
      "Epoch 61/100 | Batch 50/77 | train_loss: 0.029 | test_loss: 0.086\n",
      "Epoch 62/100 | Batch 0/77 | train_loss: 0.037 | test_loss: 0.066\n",
      "Epoch 62/100 | Batch 50/77 | train_loss: 0.025 | test_loss: 0.074\n",
      "Epoch 63/100 | Batch 0/77 | train_loss: 0.146 | test_loss: 0.105\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 63/100 | Batch 50/77 | train_loss: 0.027 | test_loss: 0.062\n",
      "Epoch 64/100 | Batch 0/77 | train_loss: 0.045 | test_loss: 0.064\n",
      "Epoch 64/100 | Batch 50/77 | train_loss: 0.026 | test_loss: 0.068\n",
      "Epoch 65/100 | Batch 0/77 | train_loss: 0.039 | test_loss: 0.065\n",
      "Epoch 65/100 | Batch 50/77 | train_loss: 0.042 | test_loss: 0.100\n",
      "Epoch 66/100 | Batch 0/77 | train_loss: 0.061 | test_loss: 0.127\n",
      "Epoch 66/100 | Batch 50/77 | train_loss: 0.033 | test_loss: 0.073\n",
      "Epoch 67/100 | Batch 0/77 | train_loss: 0.040 | test_loss: 0.092\n",
      "Epoch 67/100 | Batch 50/77 | train_loss: 0.032 | test_loss: 0.070\n",
      "Epoch 68/100 | Batch 0/77 | train_loss: 0.041 | test_loss: 0.067\n",
      "Epoch 68/100 | Batch 50/77 | train_loss: 0.044 | test_loss: 0.073\n",
      "Epoch 69/100 | Batch 0/77 | train_loss: 0.032 | test_loss: 0.070\n",
      "Epoch 69/100 | Batch 50/77 | train_loss: 0.022 | test_loss: 0.058\n",
      "Epoch 70/100 | Batch 0/77 | train_loss: 0.034 | test_loss: 0.064\n",
      "Epoch 70/100 | Batch 50/77 | train_loss: 0.021 | test_loss: 0.054\n",
      "Epoch 71/100 | Batch 0/77 | train_loss: 0.041 | test_loss: 0.063\n",
      "Epoch 71/100 | Batch 50/77 | train_loss: 0.019 | test_loss: 0.053\n",
      "Epoch 72/100 | Batch 0/77 | train_loss: 0.052 | test_loss: 0.068\n",
      "Epoch 72/100 | Batch 50/77 | train_loss: 0.021 | test_loss: 0.067\n",
      "Epoch 73/100 | Batch 0/77 | train_loss: 0.058 | test_loss: 0.061\n",
      "Epoch 73/100 | Batch 50/77 | train_loss: 0.025 | test_loss: 0.063\n",
      "Epoch 74/100 | Batch 0/77 | train_loss: 0.047 | test_loss: 0.070\n",
      "Epoch 74/100 | Batch 50/77 | train_loss: 0.022 | test_loss: 0.071\n",
      "Epoch 75/100 | Batch 0/77 | train_loss: 0.031 | test_loss: 0.079\n",
      "Epoch 75/100 | Batch 50/77 | train_loss: 0.021 | test_loss: 0.072\n",
      "Epoch 76/100 | Batch 0/77 | train_loss: 0.028 | test_loss: 0.080\n",
      "Epoch 76/100 | Batch 50/77 | train_loss: 0.024 | test_loss: 0.073\n",
      "Epoch 77/100 | Batch 0/77 | train_loss: 0.037 | test_loss: 0.082\n",
      "Epoch 77/100 | Batch 50/77 | train_loss: 0.021 | test_loss: 0.068\n",
      "Epoch 78/100 | Batch 0/77 | train_loss: 0.026 | test_loss: 0.089\n",
      "Epoch 78/100 | Batch 50/77 | train_loss: 0.044 | test_loss: 0.075\n",
      "Epoch 79/100 | Batch 0/77 | train_loss: 0.034 | test_loss: 0.063\n",
      "Epoch 79/100 | Batch 50/77 | train_loss: 0.025 | test_loss: 0.081\n",
      "Epoch 80/100 | Batch 0/77 | train_loss: 0.091 | test_loss: 0.081\n",
      "Epoch 80/100 | Batch 50/77 | train_loss: 0.021 | test_loss: 0.054\n",
      "Epoch 81/100 | Batch 0/77 | train_loss: 0.025 | test_loss: 0.090\n",
      "Epoch 81/100 | Batch 50/77 | train_loss: 0.018 | test_loss: 0.072\n",
      "Epoch 82/100 | Batch 0/77 | train_loss: 0.050 | test_loss: 0.045\n",
      "Epoch 82/100 | Batch 50/77 | train_loss: 0.021 | test_loss: 0.094\n",
      "Epoch 83/100 | Batch 0/77 | train_loss: 0.025 | test_loss: 0.064\n",
      "Epoch 83/100 | Batch 50/77 | train_loss: 0.018 | test_loss: 0.089\n",
      "Epoch 84/100 | Batch 0/77 | train_loss: 0.022 | test_loss: 0.067\n",
      "Epoch 84/100 | Batch 50/77 | train_loss: 0.017 | test_loss: 0.073\n",
      "Epoch 85/100 | Batch 0/77 | train_loss: 0.019 | test_loss: 0.058\n",
      "Epoch 85/100 | Batch 50/77 | train_loss: 0.050 | test_loss: 0.071\n",
      "Epoch 86/100 | Batch 0/77 | train_loss: 0.036 | test_loss: 0.085\n",
      "Epoch 86/100 | Batch 50/77 | train_loss: 0.022 | test_loss: 0.064\n",
      "Epoch 87/100 | Batch 0/77 | train_loss: 0.020 | test_loss: 0.057\n",
      "Epoch 87/100 | Batch 50/77 | train_loss: 0.014 | test_loss: 0.054\n",
      "Epoch 88/100 | Batch 0/77 | train_loss: 0.023 | test_loss: 0.065\n",
      "Epoch 88/100 | Batch 50/77 | train_loss: 0.052 | test_loss: 0.063\n",
      "Epoch 89/100 | Batch 0/77 | train_loss: 0.049 | test_loss: 0.095\n",
      "Epoch 89/100 | Batch 50/77 | train_loss: 0.017 | test_loss: 0.055\n",
      "Epoch 90/100 | Batch 0/77 | train_loss: 0.038 | test_loss: 0.066\n",
      "Epoch 90/100 | Batch 50/77 | train_loss: 0.013 | test_loss: 0.050\n",
      "Epoch 91/100 | Batch 0/77 | train_loss: 0.024 | test_loss: 0.055\n",
      "Epoch 91/100 | Batch 50/77 | train_loss: 0.018 | test_loss: 0.063\n",
      "Epoch 92/100 | Batch 0/77 | train_loss: 0.022 | test_loss: 0.071\n",
      "Epoch 92/100 | Batch 50/77 | train_loss: 0.011 | test_loss: 0.051\n",
      "Epoch 93/100 | Batch 0/77 | train_loss: 0.025 | test_loss: 0.076\n",
      "Epoch 93/100 | Batch 50/77 | train_loss: 0.010 | test_loss: 0.039\n",
      "Epoch 94/100 | Batch 0/77 | train_loss: 0.022 | test_loss: 0.056\n",
      "Epoch 94/100 | Batch 50/77 | train_loss: 0.013 | test_loss: 0.058\n",
      "Epoch 95/100 | Batch 0/77 | train_loss: 0.024 | test_loss: 0.061\n",
      "Epoch 95/100 | Batch 50/77 | train_loss: 0.011 | test_loss: 0.053\n",
      "Epoch 96/100 | Batch 0/77 | train_loss: 0.023 | test_loss: 0.057\n",
      "Epoch 96/100 | Batch 50/77 | train_loss: 0.016 | test_loss: 0.050\n",
      "Epoch 97/100 | Batch 0/77 | train_loss: 0.019 | test_loss: 0.069\n",
      "Epoch 97/100 | Batch 50/77 | train_loss: 0.012 | test_loss: 0.061\n",
      "Epoch 98/100 | Batch 0/77 | train_loss: 0.022 | test_loss: 0.053\n",
      "Epoch 98/100 | Batch 50/77 | train_loss: 0.012 | test_loss: 0.065\n",
      "Epoch 99/100 | Batch 0/77 | train_loss: 0.028 | test_loss: 0.105\n",
      "Epoch 99/100 | Batch 50/77 | train_loss: 0.033 | test_loss: 0.080\n",
      "Epoch 100/100 | Batch 0/77 | train_loss: 0.019 | test_loss: 0.072\n",
      "Epoch 100/100 | Batch 50/77 | train_loss: 0.014 | test_loss: 0.057\n",
      "\n",
      "Source\n",
      "IN: c o m m o n <EOS>\n",
      "\n",
      "Target\n",
      "OUT: c m m m o o <EOS>\n",
      "\n",
      "Source\n",
      "IN: a p p l e <EOS>\n",
      "\n",
      "Target\n",
      "OUT: a e l p p <EOS>\n",
      "\n",
      "Source\n",
      "IN: z h e d o n g <EOS>\n",
      "\n",
      "Target\n",
      "OUT: d e g h n o z <EOS>\n"
     ]
    }
   ],
   "source": [
    "from pointer_net import PointerNetwork\n",
    "import sys\n",
    "import numpy as np\n",
    "if int(sys.version[0]) == 2:\n",
    "    from io import open\n",
    "\n",
    "\n",
    "def read_data(path):\n",
    "    with open(path, 'r', encoding='utf-8') as f:\n",
    "        return f.read()\n",
    "# end function\n",
    "\n",
    "\n",
    "def build_map(data):\n",
    "    specials = ['<GO>',  '<EOS>', '<PAD>', '<UNK>']\n",
    "    chars = list(set([char for line in data.split('\\n') for char in line]))\n",
    "    chars = sorted(chars)\n",
    "    idx2char = {idx: char for idx, char in enumerate(specials+chars)}\n",
    "    char2idx = {char: idx for idx, char in idx2char.items()}\n",
    "    return idx2char, char2idx\n",
    "# end function\n",
    "\n",
    "\n",
    "def preprocess_data(max_len):\n",
    "    \"\"\"Load the source/target letter files and encode them for training.\n",
    "\n",
    "    Each line of the source file is mapped to character indices, terminated\n",
    "    with <EOS> and right-padded with <PAD> up to max_len. The training\n",
    "    target for the pointer network is not the target characters themselves\n",
    "    but their POSITIONS within the padded source sequence.\n",
    "\n",
    "    Returns (X_indices, X_seq_len, Y_indices, Y_seq_len,\n",
    "    X_char2idx, X_idx2char).\n",
    "    \"\"\"\n",
    "    X_data = read_data('temp/letters_source.txt')\n",
    "    Y_data = read_data('temp/letters_target.txt')\n",
    "\n",
    "    # A single vocabulary built from the source side is reused for the\n",
    "    # target side; presumably each target line is a permutation of its\n",
    "    # source line -- TODO confirm against the data files.\n",
    "    X_idx2char, X_char2idx = build_map(X_data)\n",
    "    print(\"==> Word Index Built\")\n",
    "\n",
    "    x_unk = X_char2idx['<UNK>']\n",
    "    x_eos = X_char2idx['<EOS>']\n",
    "    x_pad = X_char2idx['<PAD>']\n",
    "\n",
    "    X_indices = []\n",
    "    X_seq_len = []\n",
    "    Y_indices = []\n",
    "    Y_seq_len = []\n",
    "\n",
    "    for x_line, y_line in zip(X_data.split('\\n'), Y_data.split('\\n')):\n",
    "        # Encode the line, append <EOS>, then pad out to max_len.\n",
    "        # NOTE(review): a line longer than max_len-1 characters makes the\n",
    "        # padding term negative, producing a row longer than max_len.\n",
    "        x_chars = [X_char2idx.get(char, x_unk) for char in x_line]\n",
    "        _x_chars = x_chars + [x_eos] + [x_pad]* (max_len-1-len(x_chars))\n",
    "        \n",
    "        y_chars = [X_char2idx.get(char, x_unk) for char in y_line]\n",
    "        _y_chars = y_chars + [x_eos] + [x_pad]* (max_len-1-len(y_chars))\n",
    "        # list.index returns the FIRST match, so duplicated characters all\n",
    "        # point at the same source position; raises ValueError if a target\n",
    "        # character never occurs in the padded source sequence.\n",
    "        target = [_x_chars.index(y) for y in _y_chars] # we are predicting the positions\n",
    "\n",
    "        X_indices.append(_x_chars)\n",
    "        Y_indices.append(target)\n",
    "        # +1 accounts for the appended <EOS> token.\n",
    "        X_seq_len.append(len(x_chars)+1)\n",
    "        Y_seq_len.append(len(y_chars)+1)\n",
    "\n",
    "    X_indices = np.array(X_indices)\n",
    "    Y_indices = np.array(Y_indices)\n",
    "    X_seq_len = np.array(X_seq_len)\n",
    "    Y_seq_len = np.array(Y_seq_len)\n",
    "    print(\"==> Sequence Padded\")\n",
    "\n",
    "    return X_indices, X_seq_len, Y_indices, Y_seq_len, X_char2idx, X_idx2char\n",
    "# end function\n",
    "\n",
    "\n",
    "def train_test_split(X_indices, X_seq_len, Y_indices, Y_seq_len, BATCH_SIZE):\n",
    "    X_train = X_indices[BATCH_SIZE:]\n",
    "    X_train_len = X_seq_len[BATCH_SIZE:]\n",
    "    Y_train = Y_indices[BATCH_SIZE:]\n",
    "    Y_train_len = Y_seq_len[BATCH_SIZE:]\n",
    "\n",
    "    X_test = X_indices[:BATCH_SIZE]\n",
    "    X_test_len = X_seq_len[:BATCH_SIZE]\n",
    "    Y_test = Y_indices[:BATCH_SIZE]\n",
    "    Y_test_len = Y_seq_len[:BATCH_SIZE]\n",
    "\n",
    "    return (X_train, X_train_len, Y_train, Y_train_len), (X_test, X_test_len, Y_test, Y_test_len)\n",
    "# end function\n",
    "\n",
    "\n",
    "def main():\n",
    "    \"\"\"Preprocess the letter data, train the PointerNetwork, and run\n",
    "    inference on a few sample words.\"\"\"\n",
    "    BATCH_SIZE = 128\n",
    "    MAX_LEN = 15\n",
    "    X_indices, X_seq_len, Y_indices, Y_seq_len, X_char2idx, X_idx2char = preprocess_data(MAX_LEN)\n",
    "    \n",
    "    # The first BATCH_SIZE rows become the held-out validation set.\n",
    "    (X_train, X_train_len, Y_train, Y_train_len), (X_test, X_test_len, Y_test, Y_test_len) \\\n",
    "        = train_test_split(X_indices, X_seq_len, Y_indices, Y_seq_len, BATCH_SIZE)\n",
    "    \n",
    "    # PointerNetwork is project-local (pointer_net.py); hyperparameters\n",
    "    # here are its constructor arguments.\n",
    "    model = PointerNetwork(\n",
    "        max_len = MAX_LEN,\n",
    "        rnn_size = 50,\n",
    "        X_word2idx = X_char2idx,\n",
    "        embedding_dim = 15)\n",
    "    \n",
    "    model.fit(X_train, X_train_len, Y_train, Y_train_len,\n",
    "        val_data=(X_test, X_test_len, Y_test, Y_test_len), batch_size=BATCH_SIZE, n_epoch=100)\n",
    "    # Decode a few sample words (printed as IN/OUT pairs in the output).\n",
    "    model.infer('common', X_idx2char)\n",
    "    model.infer('apple', X_idx2char)\n",
    "    model.infer('zhedong', X_idx2char)\n",
    "# end main\n",
    "\n",
    "\n",
    "if __name__ == '__main__':\n",
    "    main()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
