{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "d7429b93",
   "metadata": {},
   "outputs": [],
   "source": [
    "import tensorflow as tf\n",
    "import tensorflow.keras\n",
    "from tensorflow.keras.experimental import WideDeepModel,LinearModel\n",
    "from keras.models import Sequential\n",
    "import numpy as np\n",
    "import pandas as pd\n",
    "from itertools import product\n",
    "from keras import Model\n",
    "import math\n",
    "import keras\n",
    "import tensorflow.keras\n",
    "from keras import backend as K\n",
    "from tensorflow.keras.metrics import RootMeanSquaredError\n",
    "from tensorflow.keras.optimizers import Adam,SGD,Adagrad\n",
    "from keras.layers import Dense,Conv1D,Conv2D,Flatten,MaxPool1D,LeakyReLU,MaxPooling1D,BatchNormalization,Input,LSTM,normalization,ReLU\n",
    "from sklearn.model_selection import train_test_split\n",
    "from sklearn.preprocessing import MinMaxScaler,StandardScaler"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "01408bbb",
   "metadata": {},
   "outputs": [],
   "source": [
    "def import_dataset(normalised=True,scaleMethod='Standard'):\n",
    "    '''\n",
    "    Imports the dataset and returns train/test splits, optionally scaled\n",
    "    \n",
    "    Input:\n",
    "        normalised -- boolean; when True the whole frame is scaled with the chosen\n",
    "                      scaler, when False only X is MinMax-scaled and Y keeps its units\n",
    "        scaleMethod -- 'Standard' or 'MinMax'; scaler used when normalised is True\n",
    "    \n",
    "    Output:\n",
    "        (X_train,X_test,Y_train,Y_test) -- the training and testing dataset\n",
    "        scaler -- fitted scaler, used to perform inverse transform if dataset is scaled\n",
    "    \n",
    "    Raises:\n",
    "        ValueError -- if normalised is True and scaleMethod is unrecognised\n",
    "    '''\n",
    "    dataS = pd.read_csv('Dataset/PCC/CompleteWorkspace.csv')\n",
    "    # BUG FIX: was `data.drop(...)` -- `data` is undefined; the frame is `dataS`.\n",
    "    dataS = dataS.drop('Unnamed: 0',axis=1)\n",
    "    \n",
    "    if normalised == False:\n",
    "        # Only the inputs are MinMax-scaled; targets keep their original units.\n",
    "        # (Removed dead `scaler = 'None'` -- it was immediately overwritten.)\n",
    "        X = dataS.iloc[:,:7].values\n",
    "        Y = dataS.iloc[:,7:].values\n",
    "        scaler = MinMaxScaler(feature_range=(0,1))\n",
    "        scaler.fit(X)\n",
    "        X = scaler.transform(X)\n",
    "        X_train,X_test,Y_train,Y_test = train_test_split(X,Y,test_size=0.3,random_state=0)\n",
    "    \n",
    "    elif scaleMethod == 'Standard':\n",
    "        scaler = StandardScaler()\n",
    "        scaler.fit(dataS)\n",
    "        dataS = scaler.transform(dataS)\n",
    "        # NOTE(review): column split differs from the other branches\n",
    "        # (4 targets first vs 7 inputs first) -- confirm dataset layout.\n",
    "        X = dataS[:,4:]\n",
    "        Y = dataS[:,:4]\n",
    "        X_train,X_test,Y_train,Y_test = train_test_split(X,Y,test_size=0.05,random_state=0)\n",
    "    \n",
    "    elif scaleMethod == 'MinMax':\n",
    "        scaler = MinMaxScaler(feature_range=(0,1))\n",
    "        scaler.fit(dataS)\n",
    "        dataS = scaler.transform(dataS)\n",
    "        X = dataS[:,:7]\n",
    "        Y = dataS[:,7:]\n",
    "        X_train,X_test,Y_train,Y_test = train_test_split(X,Y,test_size=0.05,random_state=0)\n",
    "    \n",
    "    else:\n",
    "        # Previously an unknown scaleMethod fell through to the return and\n",
    "        # raised NameError; fail fast with a clear message instead.\n",
    "        raise ValueError('scaleMethod must be either Standard or MinMax')\n",
    "    \n",
    "    return X_train,X_test,Y_train,Y_test,scaler"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "a48d959a",
   "metadata": {},
   "outputs": [],
   "source": [
    "def inverseTransform(scaler,*arr):\n",
    "    '''\n",
    "    Used to perform Inverse Transformation on normalised dataset\n",
    "    \n",
    "    Input:\n",
    "        scaler -- Instance of Normaliser used\n",
    "        *arr -- list of arrays to be concatenated\n",
    "    '''\n",
    "    data = np.concatenate(arr,axis=1)\n",
    "    data = pd.DataFrame(data)\n",
    "    arrInverse = scaler.inverse_transform(data)\n",
    "    \n",
    "    return arrInverse"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "0cb60d5c",
   "metadata": {},
   "outputs": [],
   "source": [
    "def cost(y_test,y_pred):\n",
    "    '''\n",
    "    Calculates error of the model\n",
    "    '''\n",
    "    error = (y_test-y_pred)/y_test\n",
    "    error = np.sum(abs(error))/(y_test.shape[0]*y_test.shape[1])*100\n",
    "    \n",
    "    return error"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "86b51792",
   "metadata": {},
   "outputs": [],
   "source": [
    "def rmse(y_test,y_pred):\n",
    "    error = np.sum((y_test-y_pred)**2)\n",
    "    error = error/(y_test.shape[0]*y_test.shape[1])\n",
    "    error = math.sqrt(error)\n",
    "    return error"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "0893c305",
   "metadata": {},
   "outputs": [],
   "source": [
    "def errorMagnitude(y_true,y_pred):\n",
    "    \n",
    "    minMag = min([min(abs(i)) for i in y_true-y_pred])\n",
    "    maxMag = max([max(abs(i)) for i in y_true-y_pred])\n",
    "    \n",
    "    return (minMag,maxMag)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "3011e9c3",
   "metadata": {},
   "outputs": [],
   "source": [
    "def cross_transformation(X,C):\n",
    "    phi = np.zeros(shape=X.shape)\n",
    "    for i in range(X.shape[1]):\n",
    "        phi[:,i] = X[:,i]**C[i]\n",
    "    phi = np.prod(phi,axis=1)\n",
    "    phi = phi.reshape(phi.shape[0],1)\n",
    "    phi = np.concatenate((X,phi),axis=1)\n",
    "    return phi"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "59cc064b",
   "metadata": {},
   "outputs": [],
   "source": [
    "inputLength = []\n",
    "normal = [340,340,340,340]\n",
    "segLength = 340\n",
    "for l1 in range(segLength-30,segLength+31):\n",
    "    for l2 in range(segLength-30,segLength+31):\n",
    "        for l3 in range(segLength-30,segLength+31):\n",
    "            for l4 in range(segLength-30,segLength+31):\n",
    "                #value = np.random.randint(-30,30,4)\n",
    "                length = [l1,l2,l3,l4]\n",
    "                inputLength.append(length)\n",
    "inputLength = np.array(inputLength)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "360dbe95",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Load the unscaled dataset split; with normalised=False the returned scaler\n",
    "# is the MinMaxScaler fitted on X inside import_dataset.\n",
    "X_train,X_test,Y_train,Y_test,scaler = import_dataset(normalised = False)\n",
    "# X_trainD = X_train.reshape(X_train.shape[0],1,X_train.shape[1])\n",
    "# X_testD = X_test.reshape(X_test.shape[0],1,X_test.shape[1])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "id": "cfe9f32e",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Shuffle the indices of all candidate length combinations.\n",
    "# NOTE(review): no random seed is set, so this permutation is not\n",
    "# reproducible across kernel restarts -- consider seeding first.\n",
    "arr = np.random.permutation(inputLength.shape[0])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "id": "50766f89",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Keep the first 60,000 shuffled length combinations as training inputs.\n",
    "# NOTE(review): this overwrites the X_train returned by import_dataset in an\n",
    "# earlier cell -- confirm which training inputs are intended downstream.\n",
    "X_train = inputLength[arr[:60000],:]\n",
    "# X_test = inputLength[arr[12461256:],:]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "id": "42ee4ebb",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 1/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 113376.7109: 0s - loss:\n",
      "Epoch 2/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 102209.7812\n",
      "Epoch 3/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 82365.6250A: 0s - loss: 85759.05 - ETA: 0s - loss: 84\n",
      "Epoch 4/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 56790.5664\n",
      "Epoch 5/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 30720.3984A: 0s - loss: 37865.7 - ETA: 0s - l\n",
      "Epoch 6/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 10124.5996\n",
      "Epoch 7/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 1021.4244\n",
      "Epoch 8/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 276.1123A: 0s \n",
      "Epoch 9/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 229.8761\n",
      "Epoch 10/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 220.0141A\n",
      "Epoch 11/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 212.3305\n",
      "Epoch 12/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 204.6064\n",
      "Epoch 13/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 198.7801\n",
      "Epoch 14/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 195.7892\n",
      "Epoch 15/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 193.2016\n",
      "Epoch 16/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 190.4589A: 0s - l\n",
      "Epoch 17/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 188.6933A: 0s - lo - ETA: 0s - loss: 188.881\n",
      "Epoch 18/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 188.2482A: 0s - loss: 1\n",
      "Epoch 19/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 184.8705\n",
      "Epoch 20/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 182.5508A\n",
      "Epoch 21/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 180.3062A: 0s - loss: 180.5\n",
      "Epoch 22/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 177.5170A: 0s - \n",
      "Epoch 23/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 175.7804\n",
      "Epoch 24/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 174.3341\n",
      "Epoch 25/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 172.1663\n",
      "Epoch 26/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 170.6099A: 0s - loss: 170. - ETA: 0s - loss: 170.8\n",
      "Epoch 27/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 170.2182\n",
      "Epoch 28/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 169.9175\n",
      "Epoch 29/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 169.3250\n",
      "Epoch 30/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 169.0390\n",
      "Epoch 31/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 168.4360\n",
      "Epoch 32/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 168.5270\n",
      "Epoch 33/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 168.7041A: 0s - loss: 168.70\n",
      "Epoch 34/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 167.7239A: \n",
      "Epoch 35/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 167.7520\n",
      "Epoch 36/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 166.6111\n",
      "Epoch 37/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 165.8286\n",
      "Epoch 38/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 165.1135\n",
      "Epoch 39/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 165.1366\n",
      "Epoch 40/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 164.7036\n",
      "Epoch 41/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 164.5605\n",
      "Epoch 42/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 164.4746A: 0s - loss: 165 - ETA: 0s - loss: 1\n",
      "Epoch 43/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 163.8286\n",
      "Epoch 44/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 163.1548\n",
      "Epoch 45/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 162.6877\n",
      "Epoch 46/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 162.3168\n",
      "Epoch 47/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 162.0090\n",
      "Epoch 48/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 161.1322\n",
      "Epoch 49/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 160.8729\n",
      "Epoch 50/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 160.5783\n",
      "Epoch 51/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 159.7739\n",
      "Epoch 52/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 159.0383\n",
      "Epoch 53/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 158.2855\n",
      "Epoch 54/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 157.2126\n",
      "Epoch 55/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 156.2564\n",
      "Epoch 56/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 154.8986\n",
      "Epoch 57/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 153.3645\n",
      "Epoch 58/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 151.7730\n",
      "Epoch 59/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 150.1293\n",
      "Epoch 60/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 147.9552\n",
      "Epoch 61/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 145.8931\n",
      "Epoch 62/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 143.8836\n",
      "Epoch 63/1000\n",
      "938/938 [==============================] - ETA: 0s - loss: 141.649 - 1s 1ms/step - loss: 141.5703\n",
      "Epoch 64/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 140.1422\n",
      "Epoch 65/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 138.2390\n",
      "Epoch 66/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 136.2244\n",
      "Epoch 67/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 135.3934\n",
      "Epoch 68/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 134.4277A: 0s - loss: 134.444\n",
      "Epoch 69/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 133.2176\n",
      "Epoch 70/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 132.1526\n",
      "Epoch 71/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 130.8567\n",
      "Epoch 72/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 130.8201\n",
      "Epoch 73/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 129.9319\n",
      "Epoch 74/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 129.2325\n",
      "Epoch 75/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 128.9368\n",
      "Epoch 76/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 128.3904A: 0s \n",
      "Epoch 77/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 127.6661\n",
      "Epoch 78/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 127.4582A: 0s - loss: - ETA: 0s - loss: 12\n",
      "Epoch 79/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 127.6401\n",
      "Epoch 80/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 127.2721A: 0s - lo\n",
      "Epoch 81/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 126.4111\n",
      "Epoch 82/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 126.7357\n",
      "Epoch 83/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 126.3054\n",
      "Epoch 84/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 125.8188\n",
      "Epoch 85/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 125.8851\n",
      "Epoch 86/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 125.0217A: 0s - loss: 125.\n",
      "Epoch 87/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 125.2956A: 0s - l\n",
      "Epoch 88/1000\n",
      "938/938 [==============================] - ETA: 0s - loss: 125.335 - 1s 1ms/step - loss: 125.1814\n",
      "Epoch 89/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 124.7330A: 1s - - ETA: 0s - loss: 124.34 - ETA: 0s - loss: 124.6 - ETA: 0s - loss: 124.56\n",
      "Epoch 90/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "938/938 [==============================] - 1s 2ms/step - loss: 123.7853\n",
      "Epoch 91/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 124.1635A: 0s -\n",
      "Epoch 92/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 124.2129\n",
      "Epoch 93/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 123.7551A: 0s - \n",
      "Epoch 94/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 123.9017\n",
      "Epoch 95/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 123.8746\n",
      "Epoch 96/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 123.3434A: 0s \n",
      "Epoch 97/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 122.2422\n",
      "Epoch 98/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 122.8021A: 0s - loss: 123.6 - ETA: 0s - loss: 122.9 - ETA: 0s - loss: 12\n",
      "Epoch 99/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 122.7268A: 0s - loss: 122\n",
      "Epoch 100/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 122.3788\n",
      "Epoch 101/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 122.5064A: 0s\n",
      "Epoch 102/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 122.4934A: 0s - loss: 122 - ETA: 0s - \n",
      "Epoch 103/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 122.1729\n",
      "Epoch 104/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 122.2059A: 1 - ETA: 0s - loss: 122.14\n",
      "Epoch 105/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 121.6598\n",
      "Epoch 106/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 121.9597\n",
      "Epoch 107/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 121.7951\n",
      "Epoch 108/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 121.3289\n",
      "Epoch 109/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 121.0427\n",
      "Epoch 110/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 121.0855\n",
      "Epoch 111/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 120.3238\n",
      "Epoch 112/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 120.6693\n",
      "Epoch 113/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 120.3610\n",
      "Epoch 114/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 120.2540\n",
      "Epoch 115/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 120.4408\n",
      "Epoch 116/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 120.0723A\n",
      "Epoch 117/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 120.2545\n",
      "Epoch 118/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 120.1880\n",
      "Epoch 119/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 119.4918A: 0s - lo\n",
      "Epoch 120/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 119.2805\n",
      "Epoch 121/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 119.6742A: 0s - loss: 119\n",
      "Epoch 122/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 119.8286\n",
      "Epoch 123/1000\n",
      "938/938 [==============================] - ETA: 0s - loss: 119.399 - 1s 2ms/step - loss: 119.4339\n",
      "Epoch 124/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 119.6820A: 0s - loss: 119.\n",
      "Epoch 125/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 118.9691\n",
      "Epoch 126/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 118.9097\n",
      "Epoch 127/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 118.6398\n",
      "Epoch 128/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 118.4249\n",
      "Epoch 129/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 118.8462\n",
      "Epoch 130/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 118.0633\n",
      "Epoch 131/1000\n",
      "938/938 [==============================] - 2s 3ms/step - loss: 118.6039\n",
      "Epoch 132/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 118.3133\n",
      "Epoch 133/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 118.3222\n",
      "Epoch 134/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 117.8031\n",
      "Epoch 135/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 118.1156\n",
      "Epoch 136/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 117.9287\n",
      "Epoch 137/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 117.5273A: 0s - loss - ETA: 0s - loss:\n",
      "Epoch 138/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 117.4224A\n",
      "Epoch 139/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 117.7126A: 0s - los\n",
      "Epoch 140/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 117.1447A: 1s - loss: 11 - ETA: 0s - \n",
      "Epoch 141/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 117.3063A: 0\n",
      "Epoch 142/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 116.8298A: 0s - loss: 116.88 - ETA: 0s - los\n",
      "Epoch 143/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 117.0932A: 0s - loss: 117.14\n",
      "Epoch 144/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 116.9093A: 0s - loss: 1\n",
      "Epoch 145/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 116.6881A: 0s - loss\n",
      "Epoch 146/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 117.2997A: 1s - ETA: 0s - loss: 116. - ETA: 0s - loss: 1\n",
      "Epoch 147/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 116.1603\n",
      "Epoch 148/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 116.4863A: 0s - loss: 116.4\n",
      "Epoch 149/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 116.9458A: 0s - loss\n",
      "Epoch 150/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 116.0616A: 1s - loss: - ETA: 0s \n",
      "Epoch 151/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 116.6697\n",
      "Epoch 152/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 115.8904\n",
      "Epoch 153/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 116.4460A: 1s - loss: 116. - E\n",
      "Epoch 154/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 115.9616A: 0s - loss:\n",
      "Epoch 155/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 116.0430A: 1s - loss: - ETA: 1\n",
      "Epoch 156/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 116.1919A: 1s - lo - ETA: 0s - loss: 11\n",
      "Epoch 157/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 115.8237A: 0s - loss: 116 - ETA: 0s - loss: 1\n",
      "Epoch 158/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 115.7284\n",
      "Epoch 159/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 116.0182\n",
      "Epoch 160/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 115.4225\n",
      "Epoch 161/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 115.3236\n",
      "Epoch 162/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 115.3547\n",
      "Epoch 163/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 114.6214\n",
      "Epoch 164/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 114.9141\n",
      "Epoch 165/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 115.1604\n",
      "Epoch 166/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 114.8617\n",
      "Epoch 167/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 114.5108\n",
      "Epoch 168/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 114.4107\n",
      "Epoch 169/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 114.9928A: 0s - los\n",
      "Epoch 170/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 115.0895\n",
      "Epoch 171/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 114.8217\n",
      "Epoch 172/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 114.8914\n",
      "Epoch 173/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 114.8103\n",
      "Epoch 174/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 114.7829\n",
      "Epoch 175/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 113.6923A: 1s - loss: 113. - ETA: 0s - los - ETA: 0s - loss: 113\n",
      "Epoch 176/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "938/938 [==============================] - 1s 2ms/step - loss: 114.2952A: 0s - loss\n",
      "Epoch 177/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 114.2872\n",
      "Epoch 178/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 113.6998\n",
      "Epoch 179/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 114.3483\n",
      "Epoch 180/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 113.7321\n",
      "Epoch 181/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 113.9851\n",
      "Epoch 182/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 114.1874\n",
      "Epoch 183/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 113.7018\n",
      "Epoch 184/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 113.4548\n",
      "Epoch 185/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 113.6919\n",
      "Epoch 186/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 113.6485A: 0s - loss: \n",
      "Epoch 187/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 113.5455ETA: 0s - loss: 113.549\n",
      "Epoch 188/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 113.8220\n",
      "Epoch 189/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 113.8124\n",
      "Epoch 190/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 112.7043\n",
      "Epoch 191/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 113.0763A: 1s - loss: 113.1 - ETA: 0s - loss: 112.8 - ETA: 0s - loss: 112 - ETA: 0s - loss:\n",
      "Epoch 192/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 112.9064A\n",
      "Epoch 193/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 113.1283A:\n",
      "Epoch 194/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 113.4902\n",
      "Epoch 195/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 112.5717\n",
      "Epoch 196/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 112.6718A: 0s - los\n",
      "Epoch 197/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 112.7452\n",
      "Epoch 198/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 112.6373\n",
      "Epoch 199/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 112.4666\n",
      "Epoch 200/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 112.1273\n",
      "Epoch 201/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 112.6103\n",
      "Epoch 202/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 112.2531\n",
      "Epoch 203/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 112.0837A: 0s - loss:\n",
      "Epoch 204/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 112.1926\n",
      "Epoch 205/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 112.2225\n",
      "Epoch 206/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 111.6612A: 0s \n",
      "Epoch 207/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 111.8884A: \n",
      "Epoch 208/1000\n",
      "938/938 [==============================] - ETA: 0s - loss: 111.6770- ETA: 1 - ETA: 0s - loss: 111.8 - 1s 1ms/step - loss: 111.7234\n",
      "Epoch 209/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 112.3772A: 0s - loss: 112.561 - ETA: 0s - loss\n",
      "Epoch 210/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 111.7588\n",
      "Epoch 211/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 111.5535A: 0s -\n",
      "Epoch 212/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 111.7858\n",
      "Epoch 213/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 111.5535\n",
      "Epoch 214/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 111.5563A: 0s - loss: \n",
      "Epoch 215/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 111.2260A: 0s - loss: 111.31\n",
      "Epoch 216/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 111.6013A: 1s - loss: 113 - ETA: 1s  - ETA: 0s - loss: 111\n",
      "Epoch 217/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 110.8798\n",
      "Epoch 218/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 110.9630A: 0s - loss: 111.58 - ETA: 0s -  - ETA: 0s - loss: 110.\n",
      "Epoch 219/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 111.1230A: 1s - los - ETA: 0s - loss: \n",
      "Epoch 220/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 111.3468A: 0s - loss: 111.4\n",
      "Epoch 221/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 111.3496\n",
      "Epoch 222/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 111.1751A: 0s - loss: 111 - ETA: 0s - lo\n",
      "Epoch 223/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 110.8783A: 0s - l\n",
      "Epoch 224/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 111.4622A\n",
      "Epoch 225/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 111.0375\n",
      "Epoch 226/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 110.4688\n",
      "Epoch 227/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 111.4457A: 0s - los\n",
      "Epoch 228/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 110.6636A: 0s - loss: 110 - ETA: 0\n",
      "Epoch 229/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 110.6525\n",
      "Epoch 230/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 110.5608\n",
      "Epoch 231/1000\n",
      "938/938 [==============================] - 2s 3ms/step - loss: 111.0027\n",
      "Epoch 232/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 110.8807\n",
      "Epoch 233/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 110.4178A: 0s - loss: 110.4\n",
      "Epoch 234/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 110.6156\n",
      "Epoch 235/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 110.8212\n",
      "Epoch 236/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 110.6370\n",
      "Epoch 237/1000\n",
      "938/938 [==============================] - 4s 4ms/step - loss: 110.3517\n",
      "Epoch 238/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 110.3087A: 0s - loss:\n",
      "Epoch 239/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 110.3900\n",
      "Epoch 240/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 110.5633\n",
      "Epoch 241/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 110.4394\n",
      "Epoch 242/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 110.5231A:\n",
      "Epoch 243/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 110.6649\n",
      "Epoch 244/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 110.6304\n",
      "Epoch 245/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 110.4098A: 1s - loss:  - ETA: 1s - loss: 111.047 - ETA: 1s - loss:  - ETA: 0s - los - ETA: 0s - loss: 109\n",
      "Epoch 246/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 110.4768\n",
      "Epoch 247/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 110.4163\n",
      "Epoch 248/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 110.3789A: 0s - loss: \n",
      "Epoch 249/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 109.9566A: 0s - loss - ETA: 0s - loss: 11\n",
      "Epoch 250/1000\n",
      "938/938 [==============================] - 2s 3ms/step - loss: 110.0472\n",
      "Epoch 251/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 110.2076\n",
      "Epoch 252/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 110.1824A:  - ETA: 0s \n",
      "Epoch 253/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 110.1044\n",
      "Epoch 254/1000\n",
      "938/938 [==============================] - 2s 3ms/step - loss: 110.2080\n",
      "Epoch 255/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 110.0888A: 0s - loss: 110.1\n",
      "Epoch 256/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 109.7903A: 0s - loss: 109.\n",
      "Epoch 257/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 109.8193A: 1 - ETA: 0s - loss: 109.811\n",
      "Epoch 258/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 109.4254A: 0s - loss: 1 - ETA: 0s - loss: 109. - ETA: 0s - loss: 109.\n",
      "Epoch 259/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 109.8765A\n",
      "Epoch 260/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "938/938 [==============================] - 1s 2ms/step - loss: 109.7088A: 0s\n",
      "Epoch 261/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 109.5516\n",
      "Epoch 262/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 109.9430A: \n",
      "Epoch 263/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 109.9932\n",
      "Epoch 264/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 109.4187A: 1s - l - ETA: 0s - loss: 109.587 - ETA: 0s - loss: 1\n",
      "Epoch 265/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 109.6154A: 0s - loss: \n",
      "Epoch 266/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 109.6095\n",
      "Epoch 267/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 109.4826\n",
      "Epoch 268/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 109.4596\n",
      "Epoch 269/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 109.2671A: 0s - loss: 109.0\n",
      "Epoch 270/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 109.6000\n",
      "Epoch 271/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 109.2711\n",
      "Epoch 272/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 109.3318\n",
      "Epoch 273/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 109.5008A: 1s - loss: 109. - ETA: 1s - loss: 110.346 - ETA: 1s - loss: 110. - ETA: \n",
      "Epoch 274/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 109.3017A: 0s\n",
      "Epoch 275/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 108.9547A: 0s - loss: 108\n",
      "Epoch 276/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 108.8930\n",
      "Epoch 277/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 108.9377A: 1s - loss: 109.2 - ETA: 0s - loss: 10 - ETA: 0s - loss: - ETA: 0s - loss: 108\n",
      "Epoch 278/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 109.2305A: \n",
      "Epoch 279/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 109.7696\n",
      "Epoch 280/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 109.2702\n",
      "Epoch 281/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 108.8694A: 1s - loss: 1 - ETA: 0s \n",
      "Epoch 282/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 109.0475\n",
      "Epoch 283/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 109.1130\n",
      "Epoch 284/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 109.3324A: 0s \n",
      "Epoch 285/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 109.5975A: 0s - loss: 109.\n",
      "Epoch 286/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 109.3583A:\n",
      "Epoch 287/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 109.4234A: 0s - \n",
      "Epoch 288/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 109.2957\n",
      "Epoch 289/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 109.0881A: 0s - lo\n",
      "Epoch 290/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 109.0286\n",
      "Epoch 291/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 108.9031A: 0s - loss: 108.\n",
      "Epoch 292/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 109.3486A: 0s - loss\n",
      "Epoch 293/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 109.0006TA: 0s - loss: 10\n",
      "Epoch 294/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 109.0238\n",
      "Epoch 295/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 108.8813\n",
      "Epoch 296/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 108.7132\n",
      "Epoch 297/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 108.8537A: 0s\n",
      "Epoch 298/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 108.5833\n",
      "Epoch 299/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 108.8517A: 1s - loss: 109. - ETA: 0s - \n",
      "Epoch 300/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 108.8214A: 0s - loss: 108.632\n",
      "Epoch 301/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 109.2452- ETA: 0s - loss: 109.1 - ETA: 0s - loss: 109.215\n",
      "Epoch 302/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 108.6804\n",
      "Epoch 303/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 109.0383\n",
      "Epoch 304/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 108.9359\n",
      "Epoch 305/1000\n",
      "938/938 [==============================] - 2s 3ms/step - loss: 108.8709\n",
      "Epoch 306/1000\n",
      "938/938 [==============================] - ETA: 0s - loss: 109.036 - 2s 2ms/step - loss: 109.0735\n",
      "Epoch 307/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 108.8687A: 0s - loss: 109\n",
      "Epoch 308/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 108.8343\n",
      "Epoch 309/1000\n",
      "938/938 [==============================] - ETA: 0s - loss: 108.685 - 2s 2ms/step - loss: 108.7646\n",
      "Epoch 310/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 108.8116A: 0s - loss: 108.\n",
      "Epoch 311/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 108.7904\n",
      "Epoch 312/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 108.6406\n",
      "Epoch 313/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 108.8578\n",
      "Epoch 314/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 108.5955\n",
      "Epoch 315/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 108.6512\n",
      "Epoch 316/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 108.4827\n",
      "Epoch 317/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 108.4388A: 0s - loss: - ETA: 0s - loss:\n",
      "Epoch 318/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 108.8815\n",
      "Epoch 319/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 108.5988\n",
      "Epoch 320/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 108.5731\n",
      "Epoch 321/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 108.4956\n",
      "Epoch 322/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 108.5277\n",
      "Epoch 323/1000\n",
      "938/938 [==============================] - 2s 3ms/step - loss: 108.5020\n",
      "Epoch 324/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 108.2438\n",
      "Epoch 325/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 108.2832A: 0s\n",
      "Epoch 326/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 108.2365\n",
      "Epoch 327/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 108.2793\n",
      "Epoch 328/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 108.7712A: 0s - loss: 108.8\n",
      "Epoch 329/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 108.4005A: 1s - loss: 1 -\n",
      "Epoch 330/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 108.5117\n",
      "Epoch 331/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 108.7607A: 0s - \n",
      "Epoch 332/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 107.8945A: 0s - loss:\n",
      "Epoch 333/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 108.0442\n",
      "Epoch 334/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 108.5903A: 0s - loss: 10\n",
      "Epoch 335/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 108.5865A: 1s - - ETA: 0s - loss: \n",
      "Epoch 336/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 108.2483\n",
      "Epoch 337/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 108.4268\n",
      "Epoch 338/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 108.1704\n",
      "Epoch 339/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 108.2667\n",
      "Epoch 340/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 108.3398A: 0s - loss: 108\n",
      "Epoch 341/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 108.2402\n",
      "Epoch 342/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 108.3101A: 0s - loss: \n",
      "Epoch 343/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 108.6605A: 1s  - ETA: 0s - loss: - ETA: 0s - loss: 108\n",
      "Epoch 344/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "938/938 [==============================] - 2s 2ms/step - loss: 108.0907A: 0s - loss: 108\n",
      "Epoch 345/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 108.2055\n",
      "Epoch 346/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 108.4435A: 0\n",
      "Epoch 347/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 107.9298\n",
      "Epoch 348/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 108.3111A: 1s - los - ETA: 0s - loss: 1\n",
      "Epoch 349/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 108.2966A: 0s - loss: 108.\n",
      "Epoch 350/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.6895A: 0s - loss: 107.696\n",
      "Epoch 351/1000\n",
      "938/938 [==============================] - ETA: 0s - loss: 107.950 - 1s 1ms/step - loss: 107.9170\n",
      "Epoch 352/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.9660\n",
      "Epoch 353/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 108.2767A: 0s - loss: 108.433\n",
      "Epoch 354/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.8472\n",
      "Epoch 355/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 107.9849A: 0s - - ETA: 0s - loss: 107.943\n",
      "Epoch 356/1000\n",
      "938/938 [==============================] - ETA: 0s - loss: 108.1349- ET - 1s 2ms/step - loss: 108.1602\n",
      "Epoch 357/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 108.2474\n",
      "Epoch 358/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.6599A: 0s - loss: 1 - ETA: 0s - loss: 107.332 - ETA: 0s - loss\n",
      "Epoch 359/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 108.0051A\n",
      "Epoch 360/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.7718A: 0s - loss: 10\n",
      "Epoch 361/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.9448A: 0s - loss: 1\n",
      "Epoch 362/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.9033A: \n",
      "Epoch 363/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.9683A: 0s - loss: 1\n",
      "Epoch 364/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.6778\n",
      "Epoch 365/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 107.4459\n",
      "Epoch 366/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 108.0263\n",
      "Epoch 367/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.8943\n",
      "Epoch 368/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.9479A: 0s - l\n",
      "Epoch 369/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.9320\n",
      "Epoch 370/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.4949A: 0s - loss: 107. - ETA: 0s \n",
      "Epoch 371/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.5917A: 1 - ETA: 0s - loss: 107.7\n",
      "Epoch 372/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.6330A: 0s - los - ETA: 0s - loss: 107.5\n",
      "Epoch 373/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 108.0078A: 0s - lo\n",
      "Epoch 374/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.7595A: 0s - lo - ETA: 0s - loss: 10\n",
      "Epoch 375/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.6117\n",
      "Epoch 376/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.8530\n",
      "Epoch 377/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.8779A: - ETA: 0s - loss: 107.81\n",
      "Epoch 378/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 107.8190\n",
      "Epoch 379/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.4976\n",
      "Epoch 380/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.6377\n",
      "Epoch 381/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.7252\n",
      "Epoch 382/1000\n",
      "938/938 [==============================] - ETA: 0s - loss: 107.4198- ETA: 0s - los - 1s 1ms/step - loss: 107.5358\n",
      "Epoch 383/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 108.0832A: 0s - loss: 107.65 - ETA: 0s - loss: 1\n",
      "Epoch 384/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.6469A: 0s - lo\n",
      "Epoch 385/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.7162\n",
      "Epoch 386/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.9529A: 0s -\n",
      "Epoch 387/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.8042\n",
      "Epoch 388/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.6282\n",
      "Epoch 389/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.8919A: 0s -\n",
      "Epoch 390/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.8873A: 0s \n",
      "Epoch 391/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.7694A: 0s - ETA: 0s - loss: 10\n",
      "Epoch 392/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.6516\n",
      "Epoch 393/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 108.1508A: 0s \n",
      "Epoch 394/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.6938\n",
      "Epoch 395/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.4619\n",
      "Epoch 396/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.6695A: 0s -\n",
      "Epoch 397/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 108.0768\n",
      "Epoch 398/1000\n",
      "938/938 [==============================] - ETA: 0s - loss: 107.7131- ETA: 0s - loss: 1 - 1s 1ms/step - loss: 107.7977\n",
      "Epoch 399/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.5137\n",
      "Epoch 400/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.6717A: 0s - l\n",
      "Epoch 401/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 107.8089A: 0s - los\n",
      "Epoch 402/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.9850A: 0s -\n",
      "Epoch 403/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.7005A: 0s - loss: \n",
      "Epoch 404/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 107.2447A:  - ETA: 0s - loss: 107.043\n",
      "Epoch 405/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.3249\n",
      "Epoch 406/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.7890\n",
      "Epoch 407/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.5589A: 0s - loss: 107. - ETA: 0s - loss: 107.\n",
      "Epoch 408/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 107.9421\n",
      "Epoch 409/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 107.7561A: 0s - loss\n",
      "Epoch 410/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.8644\n",
      "Epoch 411/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.8086A: 1s - loss: 108 - ETA: 0s - loss: 107.7 - ETA: 0s - loss: 107.75 - ETA: 0s - loss: 107. - ETA: 0s - loss: 1\n",
      "Epoch 412/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 108.3103A: 0s\n",
      "Epoch 413/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 108.1428\n",
      "Epoch 414/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.3908A: 0s - \n",
      "Epoch 415/1000\n",
      "938/938 [==============================] - ETA: 0s - loss: 108.0558- ETA: 0s -  - ETA: 0s - loss: 108. - 1s 1ms/step - loss: 108.0436\n",
      "Epoch 416/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 107.3963A: 1s - - ETA: 0s - l\n",
      "Epoch 417/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.4551A: 0s - loss: - ETA: 0s - loss: 107.3\n",
      "Epoch 418/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.9196\n",
      "Epoch 419/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.7554\n",
      "Epoch 420/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.7947\n",
      "Epoch 421/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.6335\n",
      "Epoch 422/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 108.1553A: 0s - \n",
      "Epoch 423/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.8708\n",
      "Epoch 424/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.6858\n",
      "Epoch 425/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.8754\n",
      "Epoch 426/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "938/938 [==============================] - 1s 1ms/step - loss: 107.7808A: 0s - loss:\n",
      "Epoch 427/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.2430A: 0s - loss: 107.24 - ETA: 0s - loss: 107.278\n",
      "Epoch 428/1000\n",
      "938/938 [==============================] - ETA: 0s - loss: 107.5616- ETA - 1s 1ms/step - loss: 107.5179\n",
      "Epoch 429/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 107.6555\n",
      "Epoch 430/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.5746\n",
      "Epoch 431/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.4052\n",
      "Epoch 432/1000\n",
      "938/938 [==============================] - ETA: 0s - loss: 107.488 - 1s 1ms/step - loss: 107.4829\n",
      "Epoch 433/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.5183\n",
      "Epoch 434/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.7446\n",
      "Epoch 435/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 108.0542\n",
      "Epoch 436/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.3691\n",
      "Epoch 437/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.4575\n",
      "Epoch 438/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.5562A: 0s - \n",
      "Epoch 439/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.2905A: 0s - loss: 1\n",
      "Epoch 440/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.1798\n",
      "Epoch 441/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.3735\n",
      "Epoch 442/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.7450A: 0s - l - ETA: 0s - loss: 107.\n",
      "Epoch 443/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.7827\n",
      "Epoch 444/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.6033\n",
      "Epoch 445/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.9810\n",
      "Epoch 446/1000\n",
      "938/938 [==============================] - ETA: 0s - loss: 107.4913- ETA: 0s - loss: 106. - ETA: 0s - l - 1s 1ms/step - loss: 107.4280\n",
      "Epoch 447/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.6406A: 0s\n",
      "Epoch 448/1000\n",
      "938/938 [==============================] - ETA: 0s - loss: 107.422 - 1s 1ms/step - loss: 107.4972\n",
      "Epoch 449/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.3336\n",
      "Epoch 450/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.4879A: 0s - loss\n",
      "Epoch 451/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.4028\n",
      "Epoch 452/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.5787\n",
      "Epoch 453/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.3885A: 0s \n",
      "Epoch 454/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.8485A: 0s - loss: 107.800\n",
      "Epoch 455/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.0286A: 0s - lo\n",
      "Epoch 456/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 107.6589\n",
      "Epoch 457/1000\n",
      "938/938 [==============================] - ETA: 0s - loss: 107.519 - 1s 1ms/step - loss: 107.6117\n",
      "Epoch 458/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 107.3450\n",
      "Epoch 459/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.3076\n",
      "Epoch 460/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.5751\n",
      "Epoch 461/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.6035A: 0s - loss: 107.437 - ETA: 0s - loss: \n",
      "Epoch 462/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 107.4035A: 0s - loss: 107 - ETA: 0s - loss: 107.\n",
      "Epoch 463/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.3015\n",
      "Epoch 464/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 107.5394A: - ETA: 0s - loss: 107\n",
      "Epoch 465/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 107.5834A: 1s - loss: - ETA: \n",
      "Epoch 466/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.4695A: 0s - loss: 107.56\n",
      "Epoch 467/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 107.9166A: 0s - l\n",
      "Epoch 468/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.9391\n",
      "Epoch 469/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.4393\n",
      "Epoch 470/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 107.3647A: 0s - loss: 106.328 - ETA: 0\n",
      "Epoch 471/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.6556\n",
      "Epoch 472/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.3332\n",
      "Epoch 473/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.9314\n",
      "Epoch 474/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.2482\n",
      "Epoch 475/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.2332\n",
      "Epoch 476/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.1936A: 0s - - ETA: 0s - loss: 106.8\n",
      "Epoch 477/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.2641A: 1s - l - ETA: 0s - loss\n",
      "Epoch 478/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.7928\n",
      "Epoch 479/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.3169\n",
      "Epoch 480/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.7091A: 0s - loss: 106.55\n",
      "Epoch 481/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.2135\n",
      "Epoch 482/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.2551\n",
      "Epoch 483/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.9929A: - ETA: 0s - loss: 107.03\n",
      "Epoch 484/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 107.0954A: 0s\n",
      "Epoch 485/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.9823\n",
      "Epoch 486/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.9528A: 1s -  - ETA: 0s - loss:\n",
      "Epoch 487/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.8909\n",
      "Epoch 488/1000\n",
      "938/938 [==============================] - 2s 3ms/step - loss: 107.0960\n",
      "Epoch 489/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.9398\n",
      "Epoch 490/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.9522A: 0s - loss: 1\n",
      "Epoch 491/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 107.0431A: 0s\n",
      "Epoch 492/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.8299\n",
      "Epoch 493/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.0109\n",
      "Epoch 494/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.0146\n",
      "Epoch 495/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 107.1364\n",
      "Epoch 496/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.9872\n",
      "Epoch 497/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 107.0581A: 0s - loss: 107.\n",
      "Epoch 498/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 107.0556\n",
      "Epoch 499/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4266\n",
      "Epoch 500/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 107.0377\n",
      "Epoch 501/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 107.2014\n",
      "Epoch 502/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.6615A: 1s -\n",
      "Epoch 503/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.5895\n",
      "Epoch 504/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.6924A\n",
      "Epoch 505/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3524\n",
      "Epoch 506/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.8105\n",
      "Epoch 507/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5309\n",
      "Epoch 508/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.7287\n",
      "Epoch 509/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4901\n",
      "Epoch 510/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3313\n",
      "Epoch 511/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "938/938 [==============================] - 2s 2ms/step - loss: 107.0474A: 0s - lo\n",
      "Epoch 512/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.8899\n",
      "Epoch 513/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.6444\n",
      "Epoch 514/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4931A: 0s - loss: 106\n",
      "Epoch 515/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.6609\n",
      "Epoch 516/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5414A\n",
      "Epoch 517/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.6690\n",
      "Epoch 518/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.6331\n",
      "Epoch 519/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5447A: 0s - loss: 10 - ETA: 0s - loss: \n",
      "Epoch 520/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.6836\n",
      "Epoch 521/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 107.0975A: 0s - loss: 107.13\n",
      "Epoch 522/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.7184\n",
      "Epoch 523/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.4929\n",
      "Epoch 524/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.7137\n",
      "Epoch 525/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.7722A: 0s - loss:  - ETA: 0s - loss: 10\n",
      "Epoch 526/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.7853\n",
      "Epoch 527/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.6088\n",
      "Epoch 528/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.4630\n",
      "Epoch 529/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.3318\n",
      "Epoch 530/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.4382\n",
      "Epoch 531/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.7901\n",
      "Epoch 532/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.4412\n",
      "Epoch 533/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.1791\n",
      "Epoch 534/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5259\n",
      "Epoch 535/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5632\n",
      "Epoch 536/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.8305\n",
      "Epoch 537/1000\n",
      "938/938 [==============================] - ETA: 0s - loss: 106.449 - 2s 2ms/step - loss: 106.4594\n",
      "Epoch 538/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4639\n",
      "Epoch 539/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.7645A: 0s\n",
      "Epoch 540/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4848A: 1s - loss: 105. - ETA: 1s - loss: 105.866 - ETA: 1s - loss: 105.6 - ETA:\n",
      "Epoch 541/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 106.5797\n",
      "Epoch 542/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.6015\n",
      "Epoch 543/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.7042\n",
      "Epoch 544/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5492\n",
      "Epoch 545/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3793A: 0s - los\n",
      "Epoch 546/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4881\n",
      "Epoch 547/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3499\n",
      "Epoch 548/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 106.4939A: 0s - l\n",
      "Epoch 549/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.2139\n",
      "Epoch 550/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 107.0433A: 0s - l\n",
      "Epoch 551/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.8832\n",
      "Epoch 552/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3123A: 0s -\n",
      "Epoch 553/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3565\n",
      "Epoch 554/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.6109\n",
      "Epoch 555/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.8169\n",
      "Epoch 556/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.9480A: 0s - los - ETA: 0s - loss: 107.\n",
      "Epoch 557/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4279A: 0s - loss: 106\n",
      "Epoch 558/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.7791A - ETA: 0s - loss: 106.84\n",
      "Epoch 559/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.8963A\n",
      "Epoch 560/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.6417A: 1s - - ETA: 0s - lo - ETA: 0s - loss: 106.59\n",
      "Epoch 561/1000\n",
      "938/938 [==============================] - ETA: 0s - loss: 106.663 - 2s 2ms/step - loss: 106.7148\n",
      "Epoch 562/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.2827\n",
      "Epoch 563/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.7036\n",
      "Epoch 564/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.6451A: 1s -  - ETA: 0s - loss: - ETA: 0s - los\n",
      "Epoch 565/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4379A: 1s - loss: 106.23 - ETA: 1 - ETA: 0s - loss: \n",
      "Epoch 566/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.8084A:  - ETA: 0s - loss: 10\n",
      "Epoch 567/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.6214A: 1s -  - ETA: 0s - loss\n",
      "Epoch 568/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.7699A: 0s - loss: \n",
      "Epoch 569/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.7902A: 0s - loss: 106.824\n",
      "Epoch 570/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3901A: 0s - loss: 106.\n",
      "Epoch 571/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.8498\n",
      "Epoch 572/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 106.8351\n",
      "Epoch 573/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.7489\n",
      "Epoch 574/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5428\n",
      "Epoch 575/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5549A: 0s - loss:\n",
      "Epoch 576/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3750A: 0\n",
      "Epoch 577/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.1548\n",
      "Epoch 578/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4065\n",
      "Epoch 579/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.8960A\n",
      "Epoch 580/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4362\n",
      "Epoch 581/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3765\n",
      "Epoch 582/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5704A: 0s - \n",
      "Epoch 583/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4198\n",
      "Epoch 584/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.1552\n",
      "Epoch 585/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3282A: 1s - los - \n",
      "Epoch 586/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4802A: 1s - loss: 106.4 - \n",
      "Epoch 587/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.7226\n",
      "Epoch 588/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3852\n",
      "Epoch 589/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3981A: \n",
      "Epoch 590/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4473A: 1s - loss: 106.22 - ETA: 1s - loss: 105 - ETA: 0s - loss:  - ETA: 0s - loss: 105. - ETA: 0s - loss: 10\n",
      "Epoch 591/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4409A: 0s - los - ETA: 0s - loss:\n",
      "Epoch 592/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.1413A:\n",
      "Epoch 593/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3553\n",
      "Epoch 594/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.3155\n",
      "Epoch 595/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.3967\n",
      "Epoch 596/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "938/938 [==============================] - 3s 3ms/step - loss: 106.2827\n",
      "Epoch 597/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.2611\n",
      "Epoch 598/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.2260A: 0s - loss: 106.175\n",
      "Epoch 599/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4233A: 0\n",
      "Epoch 600/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.8417A: 0s - loss: 106.\n",
      "Epoch 601/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.6369 ETA: 0s - loss: 1\n",
      "Epoch 602/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.6484\n",
      "Epoch 603/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.7656\n",
      "Epoch 604/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4618\n",
      "Epoch 605/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5041\n",
      "Epoch 606/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3141\n",
      "Epoch 607/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.2776\n",
      "Epoch 608/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.1967\n",
      "Epoch 609/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.0494\n",
      "Epoch 610/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4535A: 0s \n",
      "Epoch 611/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5046\n",
      "Epoch 612/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.1815\n",
      "Epoch 613/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.7171\n",
      "Epoch 614/1000\n",
      "938/938 [==============================] - ETA: 0s - loss: 106.190 - 2s 2ms/step - loss: 106.1572\n",
      "Epoch 615/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.7076\n",
      "Epoch 616/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.0301\n",
      "Epoch 617/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3617A: 0s - loss: 106.372\n",
      "Epoch 618/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.8119\n",
      "Epoch 619/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.6303\n",
      "Epoch 620/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5907\n",
      "Epoch 621/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.2164A: \n",
      "Epoch 622/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.0391\n",
      "Epoch 623/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.8541\n",
      "Epoch 624/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5791\n",
      "Epoch 625/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.2983\n",
      "Epoch 626/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5521\n",
      "Epoch 627/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.6752\n",
      "Epoch 628/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.8484\n",
      "Epoch 629/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.7294\n",
      "Epoch 630/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3949\n",
      "Epoch 631/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5151A: 0s - loss: 106.45\n",
      "Epoch 632/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.8472\n",
      "Epoch 633/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3652\n",
      "Epoch 634/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4499\n",
      "Epoch 635/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3623\n",
      "Epoch 636/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.6881A: 0s - loss: 1\n",
      "Epoch 637/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.6434\n",
      "Epoch 638/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5765A: 0s - loss\n",
      "Epoch 639/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.6411A: 0s - loss: 10\n",
      "Epoch 640/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4422\n",
      "Epoch 641/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.6454\n",
      "Epoch 642/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3344\n",
      "Epoch 643/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.8334A: 1s - loss:  - ETA:\n",
      "Epoch 644/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.6117A: 0s - loss: 1\n",
      "Epoch 645/1000\n",
      "938/938 [==============================] - ETA: 0s - loss: 106.346 - 2s 2ms/step - loss: 106.2874\n",
      "Epoch 646/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3755A: 1s - - ETA: 0s - ETA: 0s - loss: 106.437\n",
      "Epoch 647/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.6534A: 1 - ETA: 0s - loss:  - ETA: 0s - loss: 106.6 - ETA: 0s - loss: 106.58\n",
      "Epoch 648/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4125A: 0s - loss: 106 - ETA: 0s - loss\n",
      "Epoch 649/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.1938A: 1s - los - ETA\n",
      "Epoch 650/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.2514\n",
      "Epoch 651/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 106.4868\n",
      "Epoch 652/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3335\n",
      "Epoch 653/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.2539\n",
      "Epoch 654/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5463A: 1s - l - ETA: 0s -  - ETA: 0s - loss: 106.51\n",
      "Epoch 655/1000\n",
      "938/938 [==============================] - ETA: 0s - loss: 106.2045- ETA: 0s - 2s 2ms/step - loss: 106.1802\n",
      "Epoch 656/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3743\n",
      "Epoch 657/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4165A: 0s - loss\n",
      "Epoch 658/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.7513\n",
      "Epoch 659/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.0754\n",
      "Epoch 660/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4317A: 0s - \n",
      "Epoch 661/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.1860A: 0s - los - ETA: 0s - loss: 106\n",
      "Epoch 662/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.6251\n",
      "Epoch 663/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 105.9410A: 0s - loss: \n",
      "Epoch 664/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.6670\n",
      "Epoch 665/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3725\n",
      "Epoch 666/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.8524A\n",
      "Epoch 667/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.1871A: 0s - loss: 106\n",
      "Epoch 668/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5078A: 1s - loss - ETA: 0s - loss: 106.8 - ETA: 0s - \n",
      "Epoch 669/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4689A: 1s - loss: 1 - E\n",
      "Epoch 670/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4380\n",
      "Epoch 671/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4067\n",
      "Epoch 672/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.2937A: 0s -\n",
      "Epoch 673/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5866\n",
      "Epoch 674/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3675A: 0s \n",
      "Epoch 675/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.8108A: 0s - loss: 1\n",
      "Epoch 676/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5736A: 0s - \n",
      "Epoch 677/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.1406\n",
      "Epoch 678/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.7664A: 1s - - ETA: 0s - loss\n",
      "Epoch 679/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3577\n",
      "Epoch 680/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 105.9941A: 0\n",
      "Epoch 681/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3657\n",
      "Epoch 682/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "938/938 [==============================] - 2s 3ms/step - loss: 106.4298\n",
      "Epoch 683/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3971\n",
      "Epoch 684/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.1499A: 1s  - ETA: 0s - loss: 106.02\n",
      "Epoch 685/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.0904\n",
      "Epoch 686/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5204\n",
      "Epoch 687/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.6124\n",
      "Epoch 688/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.5793\n",
      "Epoch 689/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3485\n",
      "Epoch 690/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3528\n",
      "Epoch 691/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4323\n",
      "Epoch 692/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.2478A: 0s - loss: 106.4 - ETA: 0s - loss: \n",
      "Epoch 693/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4858\n",
      "Epoch 694/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.0801A: 0s - loss: 106.2 - ETA: 0s - loss: 106 - ETA: 0s - loss: - ETA: 0s - loss: 106.06\n",
      "Epoch 695/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5043A: 1s - los - ETA: \n",
      "Epoch 696/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5502\n",
      "Epoch 697/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 105.8834A: 0s - loss:\n",
      "Epoch 698/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5388A: 0s - \n",
      "Epoch 699/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.1342A: 0s - loss: 106.2 - ETA: 0s - loss: 106.2\n",
      "Epoch 700/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5708\n",
      "Epoch 701/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5872A: 0s\n",
      "Epoch 702/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4187\n",
      "Epoch 703/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.2081\n",
      "Epoch 704/1000\n",
      "938/938 [==============================] - 2s 3ms/step - loss: 106.4792\n",
      "Epoch 705/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3044\n",
      "Epoch 706/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3457A: 1s - ETA: 0s - loss:\n",
      "Epoch 707/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.0749A: 0s - loss: 106.110\n",
      "Epoch 708/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4573\n",
      "Epoch 709/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5079A: 0s - loss:\n",
      "Epoch 710/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4320\n",
      "Epoch 711/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3875A: 0s - loss: 106.38 - ETA: 0s - loss: 106 - ETA: 0s - los - ETA: 0s - loss: 106.\n",
      "Epoch 712/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 106.4579\n",
      "Epoch 713/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.2433\n",
      "Epoch 714/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5407\n",
      "Epoch 715/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4640\n",
      "Epoch 716/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5008\n",
      "Epoch 717/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5420A: \n",
      "Epoch 718/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.2116A\n",
      "Epoch 719/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 106.5907\n",
      "Epoch 720/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.1241\n",
      "Epoch 721/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4103A: 0s - loss\n",
      "Epoch 722/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3641\n",
      "Epoch 723/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.1401A: 1s - loss: 106 - ETA: 1s - loss: 10 - ETA: 0s - loss: 106.5 - ETA: 0s - \n",
      "Epoch 724/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.0610\n",
      "Epoch 725/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4859\n",
      "Epoch 726/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.5363\n",
      "Epoch 727/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5272\n",
      "Epoch 728/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5300\n",
      "Epoch 729/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.2147\n",
      "Epoch 730/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.1794\n",
      "Epoch 731/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.3658\n",
      "Epoch 732/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.0856\n",
      "Epoch 733/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3353\n",
      "Epoch 734/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.6501\n",
      "Epoch 735/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.0327A: 0s\n",
      "Epoch 736/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3778\n",
      "Epoch 737/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.0947\n",
      "Epoch 738/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4365A: 0s \n",
      "Epoch 739/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4688A: 1s - loss - ETA: 0s - \n",
      "Epoch 740/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.0980A: 0s - lo\n",
      "Epoch 741/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3647A: 0s - loss: 106.\n",
      "Epoch 742/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.4290\n",
      "Epoch 743/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3266A: 0s - loss: 10\n",
      "Epoch 744/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.2383A: 0\n",
      "Epoch 745/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.1583\n",
      "Epoch 746/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.7473A: 0s  - ETA: 0s - loss: 106.800\n",
      "Epoch 747/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.0095\n",
      "Epoch 748/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4539A: 0s - loss - ETA: 0s - l\n",
      "Epoch 749/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.2482A: 1s - - ETA: 0s - loss: - ETA: 0s - loss: 1\n",
      "Epoch 750/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.2697\n",
      "Epoch 751/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.0078A: 0s - loss: 106.022\n",
      "Epoch 752/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4378\n",
      "Epoch 753/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4240A: \n",
      "Epoch 754/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3675A: 0s - loss\n",
      "Epoch 755/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5642\n",
      "Epoch 756/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 106.1532A: 0s - l\n",
      "Epoch 757/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.1443\n",
      "Epoch 758/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.0492A - ETA: 0s - loss: 10\n",
      "Epoch 759/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3919\n",
      "Epoch 760/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 105.9992A: 0s - loss: \n",
      "Epoch 761/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.2531A: 0s - loss: 106.45\n",
      "Epoch 762/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.2004A:\n",
      "Epoch 763/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3152\n",
      "Epoch 764/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3148\n",
      "Epoch 765/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5467\n",
      "Epoch 766/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.3669A: 0s -  - ETA: 0s - loss: 106\n",
      "Epoch 767/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "938/938 [==============================] - ETA: 0s - loss: 106.140 - 2s 2ms/step - loss: 106.1959\n",
      "Epoch 768/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4819\n",
      "Epoch 769/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5690A: 0s -\n",
      "Epoch 770/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 105.9451A: 0s - loss: 105.9\n",
      "Epoch 771/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 105.9880\n",
      "Epoch 772/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.1948A:\n",
      "Epoch 773/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.5231\n",
      "Epoch 774/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5865\n",
      "Epoch 775/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.2575\n",
      "Epoch 776/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.7227A: 0s - loss: 1\n",
      "Epoch 777/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 106.2634\n",
      "Epoch 778/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4020A: 0s \n",
      "Epoch 779/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.0253\n",
      "Epoch 780/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.7646\n",
      "Epoch 781/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5252\n",
      "Epoch 782/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.4991\n",
      "Epoch 783/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3951\n",
      "Epoch 784/1000\n",
      "938/938 [==============================] - 2s 3ms/step - loss: 106.1447\n",
      "Epoch 785/1000\n",
      "938/938 [==============================] - 2s 3ms/step - loss: 106.3169\n",
      "Epoch 786/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 105.8994\n",
      "Epoch 787/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3616A: 0s -\n",
      "Epoch 788/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.2839\n",
      "Epoch 789/1000\n",
      "938/938 [==============================] - 2s 3ms/step - loss: 106.6749\n",
      "Epoch 790/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.5750A: 2s - l - ETA: 1s - loss: 107.2 - ETA: 1s - loss: 106.8 - ET\n",
      "Epoch 791/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 105.9151A: 1s - loss: 1 - ETA: 1s - loss: 105.52 - ETA: 1s - loss: 105. - ETA: 1s - loss: 105.59 - ETA: 1s - los - ETA: 0s - loss: 10 - ETA: 0s - loss: 105.\n",
      "Epoch 792/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.2301\n",
      "Epoch 793/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.4415\n",
      "Epoch 794/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.3195\n",
      "Epoch 795/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.3068\n",
      "Epoch 796/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.4788\n",
      "Epoch 797/1000\n",
      "938/938 [==============================] - ETA: 0s - loss: 106.314 - 3s 3ms/step - loss: 106.2674\n",
      "Epoch 798/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.5244\n",
      "Epoch 799/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.2608A:\n",
      "Epoch 800/1000\n",
      "938/938 [==============================] - ETA: 0s - loss: 106.369 - 3s 3ms/step - loss: 106.3561\n",
      "Epoch 801/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.1800\n",
      "Epoch 802/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.1123A: 0s - loss: 105.9\n",
      "Epoch 803/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.4523\n",
      "Epoch 804/1000\n",
      "938/938 [==============================] - 3s 4ms/step - loss: 106.2911\n",
      "Epoch 805/1000\n",
      "938/938 [==============================] - 4s 4ms/step - loss: 106.6759\n",
      "Epoch 806/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.1967\n",
      "Epoch 807/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.3434\n",
      "Epoch 808/1000\n",
      "938/938 [==============================] - 4s 4ms/step - loss: 106.6473\n",
      "Epoch 809/1000\n",
      "938/938 [==============================] - 3s 4ms/step - loss: 106.3412\n",
      "Epoch 810/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 105.9652\n",
      "Epoch 811/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.5918\n",
      "Epoch 812/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 105.7779\n",
      "Epoch 813/1000\n",
      "938/938 [==============================] - 3s 4ms/step - loss: 106.3730\n",
      "Epoch 814/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.3547\n",
      "Epoch 815/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.1186\n",
      "Epoch 816/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.3893\n",
      "Epoch 817/1000\n",
      "938/938 [==============================] - 4s 4ms/step - loss: 106.3411\n",
      "Epoch 818/1000\n",
      "938/938 [==============================] - 5s 5ms/step - loss: 106.2174\n",
      "Epoch 819/1000\n",
      "938/938 [==============================] - 5s 5ms/step - loss: 106.3304\n",
      "Epoch 820/1000\n",
      "938/938 [==============================] - 6s 6ms/step - loss: 106.2162\n",
      "Epoch 821/1000\n",
      "938/938 [==============================] - 5s 6ms/step - loss: 106.1152\n",
      "Epoch 822/1000\n",
      "938/938 [==============================] - 5s 5ms/step - loss: 106.3821\n",
      "Epoch 823/1000\n",
      "938/938 [==============================] - 4s 5ms/step - loss: 106.4100\n",
      "Epoch 824/1000\n",
      "938/938 [==============================] - 4s 4ms/step - loss: 106.0209\n",
      "Epoch 825/1000\n",
      "938/938 [==============================] - 4s 4ms/step - loss: 105.9127\n",
      "Epoch 826/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.0978\n",
      "Epoch 827/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.1171\n",
      "Epoch 828/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.1654\n",
      "Epoch 829/1000\n",
      "938/938 [==============================] - 4s 4ms/step - loss: 106.2797\n",
      "Epoch 830/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.3508\n",
      "Epoch 831/1000\n",
      "938/938 [==============================] - 3s 4ms/step - loss: 106.4831\n",
      "Epoch 832/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.4674\n",
      "Epoch 833/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.2926\n",
      "Epoch 834/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.4101\n",
      "Epoch 835/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.0432\n",
      "Epoch 836/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 105.9718\n",
      "Epoch 837/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.2844A - ETA: 0s - l\n",
      "Epoch 838/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.1511\n",
      "Epoch 839/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.4346A: 0s - loss: 106.434\n",
      "Epoch 840/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.3013\n",
      "Epoch 841/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.4201A: 0s -\n",
      "Epoch 842/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.2494A: 2s - loss: 105.329 - ETA: 1s \n",
      "Epoch 843/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.4309\n",
      "Epoch 844/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.4785\n",
      "Epoch 845/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.2445\n",
      "Epoch 846/1000\n",
      "938/938 [==============================] - 4s 4ms/step - loss: 106.3167A: 0s -\n",
      "Epoch 847/1000\n",
      "938/938 [==============================] - 3s 4ms/step - loss: 106.3642\n",
      "Epoch 848/1000\n",
      "938/938 [==============================] - 4s 4ms/step - loss: 106.1142\n",
      "Epoch 849/1000\n",
      "938/938 [==============================] - 3s 4ms/step - loss: 106.1636\n",
      "Epoch 850/1000\n",
      "938/938 [==============================] - 4s 4ms/step - loss: 106.3459\n",
      "Epoch 851/1000\n",
      "938/938 [==============================] - 4s 4ms/step - loss: 106.0207\n",
      "Epoch 852/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.3557A: 0s -\n",
      "Epoch 853/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.4819A: 0s -\n",
      "Epoch 854/1000\n",
      "938/938 [==============================] - 2s 3ms/step - loss: 106.1145A: 1s - loss\n",
      "Epoch 855/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "938/938 [==============================] - 3s 3ms/step - loss: 106.6744\n",
      "Epoch 856/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.6592\n",
      "Epoch 857/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.0743\n",
      "Epoch 858/1000\n",
      "938/938 [==============================] - 2s 3ms/step - loss: 106.4604A: 0s - loss: 106.325 - ETA: 0s - loss: 106.\n",
      "Epoch 859/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.1651A: 1s - lo - ETA: 0s - loss:\n",
      "Epoch 860/1000\n",
      "938/938 [==============================] - 2s 3ms/step - loss: 106.0438\n",
      "Epoch 861/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.3531A\n",
      "Epoch 862/1000\n",
      "938/938 [==============================] - ETA: 0s - loss: 106.099 - 4s 4ms/step - loss: 106.0902\n",
      "Epoch 863/1000\n",
      "938/938 [==============================] - 4s 4ms/step - loss: 105.9557\n",
      "Epoch 864/1000\n",
      "938/938 [==============================] - 3s 4ms/step - loss: 106.0216\n",
      "Epoch 865/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.2841A: 2s - loss: - ETA: 1s - loss: 1 - ETA\n",
      "Epoch 866/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.4982A: 2s - loss: 105. - ETA: 2s  - ETA: 1s - loss: 106.2\n",
      "Epoch 867/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.2871\n",
      "Epoch 868/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.4912\n",
      "Epoch 869/1000\n",
      "938/938 [==============================] - 3s 4ms/step - loss: 106.6821A: 0s -\n",
      "Epoch 870/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.4634\n",
      "Epoch 871/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.3574A: 0s - los\n",
      "Epoch 872/1000\n",
      "938/938 [==============================] - 3s 4ms/step - loss: 106.0574\n",
      "Epoch 873/1000\n",
      "938/938 [==============================] - 2s 3ms/step - loss: 106.0900\n",
      "Epoch 874/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.0726\n",
      "Epoch 875/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.3418\n",
      "Epoch 876/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.0655A: 0s - loss: 105.\n",
      "Epoch 877/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.5059\n",
      "Epoch 878/1000\n",
      "938/938 [==============================] - 4s 4ms/step - loss: 106.0014\n",
      "Epoch 879/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.2838A: 0s -\n",
      "Epoch 880/1000\n",
      "938/938 [==============================] - 4s 4ms/step - loss: 106.0341\n",
      "Epoch 881/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.4822\n",
      "Epoch 882/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.4819\n",
      "Epoch 883/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.2668A: 0s - loss:\n",
      "Epoch 884/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.1073\n",
      "Epoch 885/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.2821\n",
      "Epoch 886/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.3681\n",
      "Epoch 887/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.4452A: 0s - loss: 106.461\n",
      "Epoch 888/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.1382\n",
      "Epoch 889/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.2163\n",
      "Epoch 890/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 105.9041A: 1s \n",
      "Epoch 891/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.3097A: 1s - los - ETA: 0s \n",
      "Epoch 892/1000\n",
      "938/938 [==============================] - 4s 4ms/step - loss: 106.5749\n",
      "Epoch 893/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.0239\n",
      "Epoch 894/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.0558A: 0s - loss: 106.0\n",
      "Epoch 895/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.1156A: 0s - loss\n",
      "Epoch 896/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.1037\n",
      "Epoch 897/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 105.8497A: 1s - - ETA: 0s -  - ETA: 0s - loss: 105\n",
      "Epoch 898/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.0607A: 0s - loss: 1\n",
      "Epoch 899/1000\n",
      "938/938 [==============================] - 4s 4ms/step - loss: 106.4073A: \n",
      "Epoch 900/1000\n",
      "938/938 [==============================] - 2s 3ms/step - loss: 106.4564A: 0s \n",
      "Epoch 901/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.4365\n",
      "Epoch 902/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.5844\n",
      "Epoch 903/1000\n",
      "938/938 [==============================] - 3s 4ms/step - loss: 106.3789\n",
      "Epoch 904/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.1051\n",
      "Epoch 905/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.2125A: 0s - l - ETA: 0s - loss: 106.1\n",
      "Epoch 906/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.2487A: 0s - loss: 106.231\n",
      "Epoch 907/1000\n",
      "938/938 [==============================] - ETA: 0s - loss: 106.0644- - ETA: 0s - loss: 105.890 - ETA: 0s - los - 2s 3ms/step - loss: 106.0185\n",
      "Epoch 908/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 105.8779\n",
      "Epoch 909/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.2107\n",
      "Epoch 910/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 105.9842\n",
      "Epoch 911/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.0128A: 0\n",
      "Epoch 912/1000\n",
      "938/938 [==============================] - 4s 5ms/step - loss: 106.2106\n",
      "Epoch 913/1000\n",
      "938/938 [==============================] - 4s 4ms/step - loss: 106.0237A: 0s - loss: 105.9\n",
      "Epoch 914/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.3525A: 1s - loss: 106 - ETA: 0s - \n",
      "Epoch 915/1000\n",
      "938/938 [==============================] - 3s 4ms/step - loss: 106.2905\n",
      "Epoch 916/1000\n",
      "938/938 [==============================] - 4s 4ms/step - loss: 106.2271\n",
      "Epoch 917/1000\n",
      "938/938 [==============================] - 3s 4ms/step - loss: 106.0709\n",
      "Epoch 918/1000\n",
      "938/938 [==============================] - 2s 3ms/step - loss: 105.9954\n",
      "Epoch 919/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.1653A: 0s - loss: 106.2\n",
      "Epoch 920/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.0916\n",
      "Epoch 921/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.0662\n",
      "Epoch 922/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.2483\n",
      "Epoch 923/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 105.7488\n",
      "Epoch 924/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.3005A: 0s -\n",
      "Epoch 925/1000\n",
      "938/938 [==============================] - 2s 3ms/step - loss: 106.0605\n",
      "Epoch 926/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.4222A: \n",
      "Epoch 927/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.1382\n",
      "Epoch 928/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.0642A: 2s - loss: 106.56 - ETA:  -  - ETA: 0s - loss: 106.0\n",
      "Epoch 929/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.2192A: 0s - loss: 106.\n",
      "Epoch 930/1000\n",
      "938/938 [==============================] - 2s 3ms/step - loss: 105.5203\n",
      "Epoch 931/1000\n",
      "938/938 [==============================] - ETA: 0s - loss: 105.862 - 3s 3ms/step - loss: 105.8583\n",
      "Epoch 932/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.0690A: 0s - loss: \n",
      "Epoch 933/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.1533\n",
      "Epoch 934/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.4449A: 0s - los\n",
      "Epoch 935/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.2430A: 0s - loss: 106.3 - ETA: 0s - loss: 106.\n",
      "Epoch 936/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.3195ETA: \n",
      "Epoch 937/1000\n",
      "938/938 [==============================] - 2s 3ms/step - loss: 105.9905\n",
      "Epoch 938/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.1856A: 0s - loss: 106.41 - ETA: 0s - \n",
      "Epoch 939/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3285A: 1s - loss: 106 - ETA: 1s - loss: 106.470 - ETA - ETA: 0s - lo\n",
      "Epoch 940/1000\n",
      "938/938 [==============================] - 2s 3ms/step - loss: 105.9653A: 1s - loss: 105.886 - ETA: 1s - loss: - ETA: 1s - loss: 106.25 - \n",
      "Epoch 941/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.0485 ETA: 0s - loss: 106.1\n",
      "Epoch 942/1000\n",
      "938/938 [==============================] - ETA: 0s - loss: 106.1083- ETA: 1s - loss: 10 - ETA: 1s - loss: 105.966 - ET - ETA: 0s - loss: 106.1 - 2s 2ms/step - loss: 106.1122\n",
      "Epoch 943/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.1044A: 1s - loss: 1 - ET\n",
      "Epoch 944/1000\n",
      "938/938 [==============================] - 2s 3ms/step - loss: 106.1462A: 1s - loss: - ETA: 1s - loss: 105.7 - ETA: 1s - loss - ET\n",
      "Epoch 945/1000\n",
      "938/938 [==============================] - 3s 3ms/step - loss: 106.1148\n",
      "Epoch 946/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.3040A: 1s - loss: 1 - ETA: 0s - loss:\n",
      "Epoch 947/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.0674\n",
      "Epoch 948/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.2577A: 0s \n",
      "Epoch 949/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 105.7615\n",
      "Epoch 950/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 105.9838\n",
      "Epoch 951/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.2011A: 0s - loss: 105.18 - ETA: 0s\n",
      "Epoch 952/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 105.7470A: 0s - loss:\n",
      "Epoch 953/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.4106\n",
      "Epoch 954/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 106.1285A: 0s - loss: 106. - ETA: 0s - loss:\n",
      "Epoch 955/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.0933\n",
      "Epoch 956/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.4376A: 0s - loss: 106.090 - ETA: 0s - loss: 10\n",
      "Epoch 957/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 106.3264A: 0s - loss: 106.\n",
      "Epoch 958/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.2346\n",
      "Epoch 959/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.3625\n",
      "Epoch 960/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.3046A: 0s  - ETA: 0s - loss: 106.\n",
      "Epoch 961/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.3721A: 0s - loss: - ETA: 0s - loss\n",
      "Epoch 962/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.1751\n",
      "Epoch 963/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 105.8974\n",
      "Epoch 964/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.5394\n",
      "Epoch 965/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 105.8415A: 0s - \n",
      "Epoch 966/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 106.1594A: 0s - l\n",
      "Epoch 967/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 105.9224A: 0s - loss: 105\n",
      "Epoch 968/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.2032\n",
      "Epoch 969/1000\n",
      "938/938 [==============================] - ETA: 0s - loss: 105.843 - 2s 2ms/step - loss: 105.9451\n",
      "Epoch 970/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.1527\n",
      "Epoch 971/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.1401\n",
      "Epoch 972/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 106.0858\n",
      "Epoch 973/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 106.0641\n",
      "Epoch 974/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.0641\n",
      "Epoch 975/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 106.3504A: 0s\n",
      "Epoch 976/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 106.4296\n",
      "Epoch 977/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.5181A\n",
      "Epoch 978/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 106.2191A: \n",
      "Epoch 979/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.1750\n",
      "Epoch 980/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 106.4453\n",
      "Epoch 981/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.0961A: 0s - loss: 105.7 - ETA: 0s - loss: 105\n",
      "Epoch 982/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.2720\n",
      "Epoch 983/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 105.9612\n",
      "Epoch 984/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.1665\n",
      "Epoch 985/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.5944\n",
      "Epoch 986/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.4018\n",
      "Epoch 987/1000\n",
      "938/938 [==============================] - ETA: 0s - loss: 106.386 - 1s 1ms/step - loss: 106.3801\n",
      "Epoch 988/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.0564A: 0s - loss: \n",
      "Epoch 989/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.4282A: 0s - loss: 1\n",
      "Epoch 990/1000\n",
      "938/938 [==============================] - 2s 2ms/step - loss: 105.9342A: 1s - l - ETA: 0s -\n",
      "Epoch 991/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.1861A: 0s - loss: 106.21\n",
      "Epoch 992/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.0187\n",
      "Epoch 993/1000\n",
      "938/938 [==============================] - 1s 2ms/step - loss: 106.1470\n",
      "Epoch 994/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.4491A: 0s\n",
      "Epoch 995/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.2421\n",
      "Epoch 996/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.2209\n",
      "Epoch 997/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 105.8796\n",
      "Epoch 998/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.1693\n",
      "Epoch 999/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.2242\n",
      "Epoch 1000/1000\n",
      "938/938 [==============================] - 1s 1ms/step - loss: 106.0028\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "<tensorflow.python.keras.callbacks.History at 0x20894e21280>"
      ]
     },
     "execution_count": 13,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "encoding_dim = 4\n",
    "n_inputs = 4\n",
    "visible = Input(shape=(n_inputs,))\n",
    "e = Dense(n_inputs*2)(visible)\n",
    "e = BatchNormalization()(e)\n",
    "e = ReLU()(e)\n",
    "# define bottleneck\n",
    "n_bottleneck = 2\n",
    "bottleneck = Dense(n_bottleneck)(e)\n",
    "# define decoder\n",
    "d = Dense(n_inputs*2)(bottleneck)\n",
    "d = BatchNormalization()(d)\n",
    "d = ReLU()(d)\n",
    "# output layer\n",
    "output = Dense(n_inputs, activation='linear')(d)\n",
    "# define autoencoder model\n",
    "model = Model(inputs=visible, outputs=output)\n",
    "model.compile(optimizer='rmsprop', loss='mse')\n",
    "model.fit(X_train, X_train,\n",
    "                epochs=1000,\n",
    "                batch_size=64,\n",
    "                shuffle=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "id": "51c43141",
   "metadata": {},
   "outputs": [],
   "source": [
    "encoder = Model(inputs=visible, outputs=bottleneck)\n",
    "X_train_D = encoder.predict(X_train)\n",
    "# scaler_en = MinMaxScaler(feature_range=(0,1))\n",
    "# X_train_D = scaler_en.fit_transform(X_train_D)\n",
    "# X_test_D = scaler_en.transform(X_test_D)\n",
    "# X_trainD_D = X_train_D.reshape(X_train_D.shape[0],1,X_train_D.shape[1])\n",
    "# X_testD_D = X_test_D.reshape(X_test_D.shape[0],1,X_test_D.shape[1])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "id": "d950694c",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[<matplotlib.lines.Line2D at 0x208981eec10>]"
      ]
     },
     "execution_count": 17,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXIAAAD4CAYAAADxeG0DAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAAfJ0lEQVR4nO3deXhU5f338fdNWMMSloSdkLAGSAJCAEWrqCggCAL1casbVdTWp79ff60QBBXFBVGr/lyLa221thJ2UBH3XcDKZCGBEAJhh0BIyEKWuZ8/wtNSiyRhJjlzZj6v6+Limsl47g8h+XByPPd3jLUWERFxr0ZOBxAREd+oyEVEXE5FLiLicipyERGXU5GLiLhcYycWjYyMtDExMU4sLSLiWhs3bjxkrY368fOOFHlMTAwbNmxwYmkREdcyxuw41fO6tCIi4nIqchERl1ORi4i4nIpcRMTlVOQiIi6nIhcRcTkVuYiIy6nIRUQawJHicu5fmU5hWYXfj+3IhiARkVBhrWVN6j7uW5FGQUkF5/aOZMzATn5dQ0UuIlJPDhSWMXdZGmsz9pPQLYI//3IkA7q08fs6KnIRET+z1vLOhl3MX51BeaWX2ePj+OV5sTQOq5+r2SpyERE/2plfwuylHr7MzmdEbHsenZZIbGTLel1TRS4i4gdVXsvrX+Xy+PtZhDUyPHhFPNeOiKZRI1Pva6vIRUR8tHV/ETNTPPxjZwEX9o/ioSkJdG3bosHWV5GLiJyh8kovL366jWc/yqZlszCeumoIk4d0xZj6Pws/mYpcROQMeHYVMHOxh8x9RVw+uCv3XT6QyFbNHMmiIhcRqYPS8iqeWreFlz7PIap1M166IYlL/HxfeF35pciNMa8CE4ED1tp4fxxTRCTQfJOTT3KKh9z8Eq4Z0YPZlw2gTfMmTsfy2xn568CzwBt+Op6ISMAoKqtgwbuZvPntTqLbh/PWLSMZ1SfS6Vj/5Jcit9Z+ZoyJ8cexREQCyUeZ+5mzNI39hWXccl4sv7u0Py2ahjkd69802DVyY8wMYAZAdHR0Qy0rInJGDheX88DKdJb9sId+nVrx/HWjOCu6ndOxTqnBitxauwhYBJCUlGQbal0Rkbqw1rLSs5d5K9IpKqvgv8f05Vej+9C0ceAOi9VdKyIiJ+w7WsbcZams23yAwT3asnBaIv07t3Y6Vo1U5CIS8qy1vL0+j4dXb6bC62XuhAHcfG4sYQ2wvd4f/HX74V+B0UCkMWYXcJ+19hV/HFtEpD7tyC8mOSWVr3PyOadXBxZMS6Bnh/odcuVv/rpr5Rp/HEdEpKFUeS2vfbmdx9dm0aRRIx6ZmsDVw3s0+PZ6f9ClFREJOVn7qodcbcorYMyAjjx4RQKdI5o7HeuMqchFJGSUV3p57uNsnv8kmzbNm/DMNWcxMbGLK8/CT6YiF5GQ8ENeATMXb2LL/mNcMaQr914+iPYtmzodyy9U5CIS1ErLq3hibRavfrmdTm2a8+pNSVwU5+yQK39TkYtI0Ppq2yGSU1LZebiE60ZGkzw+jtYBMOTK31TkIhJ0CssqeGTNZv76XR4xHcJ5e8bZnN2rg9Ox6o2KXESCygcZ+5m7LJWDRce57fxe/PeYfgE35MrfVOQiEhQOHTvOvBXprPLsJa5za166IYnE7m2djtUgVOQi4mrWWpb/sIf7V6ZTfLyK313Sj9su6B3QQ678TUUuIq61p6CUucvS+CjzAGdFVw+56tsp8Idc+ZuKXERcx+u1vPXdTha8m0mV13LvxIHcOCrGNUOu/E1FLiKusv1QMckpHr7dfpjz+kTyyNQEerQPdzqWo1TkIuIKlVVeXvliO3/4YAtNGzdi4bRErkzq7vrt9f6gIheRgJexp5BZKR5Sdx/l0oGdmH9FPJ3auHfIlb+pyEUkYB2vrOLZj7J54ZNttA1vwnPXDuWyhM46C/8RFbmIBKSNO44wK8VD9oFjTB3ajXsmDKRdkAy5
8jcVuYgElJLySh57P4vXv8qlS5vmvHbzcC7s39HpWAFNRS4iAeOLrYdIXuJh15FSbjinJzPHxdGqmWqqJvoMiYjjjpZU8NCaDP6+YRe9Ilvy99vOYURse6djuYaKXEQc9V7aPu5Znsbh4nLuGN2b/7q4L82bBPeQK39TkYuIIw4WVQ+5Wp26l4Fd2vDaTcOJ7xbhdCxX8kuRG2PGAU8DYcDL1toF/jiuiAQfay1Lvt/NA6syKC2v4q6x/Zlxfi+ahIXOkCt/87nIjTFhwHPAJcAuYL0xZoW1NsPXY4tIcNldUMrdS1L5dMtBhvVsx6PTEunTsZXTsVzPH2fkI4Bsa20OgDHmbWAyoCIXEaB6yNVfvt3Bo+9mYoH7Jw3i+rN70ihEh1z5mz+KvBuQd9LjXcBIPxxXRILAtoPHSE7xsD73CD/rG8nDUzTkyt/8UeSn+ifV/seLjJkBzACIjo72w7IiEsgqqry89HkOT63bSosmYTx+5WCmDe2m7fX1wB9FvgvocdLj7sCeH7/IWrsIWASQlJT0H0UvIsEjbfdRZqV4SN9TyPj4ztw/eRAdW2vIVX3xR5GvB/oaY2KB3cDVwLV+OK6IuExZRRXPfLSVFz/NoV14U164bijjE7o4HSvo+Vzk1tpKY8ydwPtU3374qrU23edkIuIqG3IPMzPFQ87BYn4+rDtzJwygbbiGXDUEv9xHbq1dA6zxx7FExF2OHa/ksfcyeeObHXSNaMEb00dwfr8op2OFFO3sFJEz9umWg9y9JJU9R0u58ZwY7hrbn5YactXg9BkXkTorKCln/qrNpHy/i95RLXnntnNIitGQK6eoyEWkTt5N3cs9y9M5UlLOnRf24c6L+mjIlcNU5CJSKwcKy7h3eTrvpe9jUNc2/Gn6cAZ11ZCrQKAiF5HTstayeOMu5q/KoKzSy6xxcdz6s1gaa8hVwFCRi8hPyjtcwt1LU/l86yFGxLRnwbQEekVpyFWgUZGLyH+o8lre+DqXx97PwgDzJw/iupEachWoVOQi8m+yDxQxKyWVjTuOcEG/KB6emkC3ti2cjiWnoSIXEaB6yNUfP93G/36YTXizMP7wfwYz5SwNuXIDFbmIkLb7KHct9rB5byETErsw7/JBRLVu5nQsqSUVuUgIK6uo4ql1W3np8xw6tGzKH68fxthBnZ2OJXWkIhcJUd/m5JO8JJXth4q5KqkHd08YQESLJk7HkjOgIhcJMUVlFSx8L4s/f7ODHu1b8OYtIzm3T6TTscQHKnKREPJx1gHmLEllb2EZ08+N5fdj+xHeVDXgdvobFAkBR4rLmb8qgyX/2E3fjq1IuWMUQ6PbOR1L/ERFLhLErLWsTt3LfcvTOVpawW8u6sOvL+pDs8YachVMVOQiQWp/YRlzl6XxQcZ+ErtH8JdbRjKgSxunY0k9UJGLBBlrLX/fkMeDqzdTXunl7svimH6uhlwFMxW5SBDZmV9C8hIPX23LZ2Rsex6dlkhMZEunY0k9U5GLBIEqr+X1r3J5/P0swhoZHpoSzzXDozXkKkSoyEVcbsv+ImYu9vBDXgEXxXXkoSnxdInQkKtQoiIXcanySi8vfLKNZz/eSqtmjXn66iFMGtxVQ65CkE9Fboy5EpgHDABGWGs3+COUiJzeprwCZqV4yNxXxKTBXbnv8oF0aKUhV6HK1zPyNGAq8Ec/ZBGRGpSWV/Hkui28/HkOHVs35+UbkhgzsJPTscRhPhW5tXYzoB/lRBrA19vymb3EQ25+CdeMiGb2ZXG0aa4hV9KA18iNMTOAGQDR0dENtayI6xWWVbDg3Uze+nYnPTuE89atIxnVW0Ou5F9qLHJjzDrgVAOK51hrl9d2IWvtImARQFJSkq11QpEQ9uHm/cxZmsaBojJu/Vks/3NJf1o01fZ6+Xc1Frm1dkxDBBGRf8k/dpz7V2awYtMe+ndqzYvXD2NIj7ZOx5IApdsPRQKItZYVm/Zw/8oMisoq+O2YftwxujdNG2t7vfw0
X28/nAI8A0QBq40xP1hrx/olmUiI2Xu0lLlL0/gw8wCDe7Rl4bRE+ndu7XQscQFf71pZCiz1UxaRkOT1Wt5en8cjazZT4fUyd8IAbj43ljBtr5da0qUVEQflHiomeYmHb3IOc06vDiyYlkDPDhpyJXWjIhdxQGWVl9e+zOWJD7Jo0qgRC6YmcNXwHtqTIWdERS7SwDL3FTJrsYdNu44yZkAnHrwins4RzZ2OJS6mIhdpIMcrq3ju4208/3E2ES2a8Mw1ZzExsYvOwsVnKnKRBvCPnUeYleJhy/5jTDmrG/dMHEj7lk2djiVBQkUuUo9Kyit5Yu0WXv1yO53bNOfVm5K4KE5DrsS/VOQi9eSr7EMkL0ll5+ESfnF2NLPGxdFaQ66kHqjIRfzsaGkFj6zZzNvr84jpEM7bM87m7F4dnI4lQUxFLuJHa9P3MXdZGoeOHee2C3rx2zH9aN5EQ66kfqnIRfzg0LHjzFuRzirPXuI6t+blG5NI7N7W6VgSIlTkIj6w1rLsh93cvzKDkuNV/O6Sftw+ujdNwjTkShqOilzkDO0pKGXO0lQ+zjrIWdHVQ676dtKQK2l4KnKROvJ6LW9+t5NH382kymu5d+JAbhwVoyFX4hgVuUgd5Bw8RnJKKt/lHua8PpE8MjWBHu3DnY4lIU5FLlILlVVeXv5iO09+sIVmjRux8OeJXDmsu7bXS0BQkYvUIGNPITNTNpG2u5Cxgzoxf3I8HdtoyJUEDhW5yE84XlnFsx9l88In22gb3oTnrxvK+PjOOguXgKMiFzmFjTuqh1xlHzjG1KHduGfCQNppyJUEKBW5yEmKj1fy+NosXv8ql64RLXj95uGM7t/R6Vgip6UiFznh860Hmb0klV1HSrnxnJ7cNS6OVs30LSKBT1+lEvKOllTw4OoM3tm4i15RLXnn9nMYHtPe6VgiteZTkRtjHgMuB8qBbcDN1toCP+QSaRDvpe3jnuVpHC4u51eje/Obi/tqyJW4jq8DIT4A4q21icAWYLbvkUTq34GiMn715kZu/8tGolo1Y/mvz2XmuDiVuLiST2fk1tq1Jz38Bvi5b3FE6pe1lpTvdzN/VQalFVXcNbY/M87vpSFX4mr+vEY+HfjbT33QGDMDmAEQHR3tx2VFamfXkRLuXprGZ1sOMqxnOx6dlkifjq2cjiXisxqL3BizDuh8ig/NsdYuP/GaOUAl8OZPHcdauwhYBJCUlGTPKK3IGfB6LX/+ZgePvpcJwP2TBnH92T1ppCFXEiRqLHJr7ZjTfdwYcyMwEbjYWquCloCy7eAxZi32sGHHEc7vF8XDU+Lp3k5DriS4+HrXyjhgFnCBtbbEP5FEfFdR5WXRZzk8/eFWWjQJ4/ErBzNtaDdtr5eg5Os18meBZsAHJ75BvrHW3u5zKhEfpO0+yqwUD+l7CrksoTPzJg2iY2sNuZLg5etdK338FUTEV2UVVfzvh1v542c5tAtvyou/GMq4+C5OxxKpd9rZKUFhfe5hZi32kHOomCuHdWfuhIFEhDdxOpZIg1CRi6sdO17JwvcyeePrHXRv14I3po/g/H5RTscSaVAqcnGtT7cc5O4lqew5WspNo2K4a2x/WmrIlYQgfdWL6xSUlPPAqgyWfL+b3lEtWXz7OQzrqSFXErpU5OIa1lreTdvHvcvTKCip4M4L+3DnRX00H0VCnopcXOFAYRn3LE/j/fT9xHdrw5+mj2BQ1winY4kEBBW5BDRrLe9s3MWDqzI4XukleXwct5wXS2MNuRL5JxW5BKy8wyXMXpLKF9mHGBHTngXTEugVpSFXIj+mIpeAU+W1vPF1Lgvfy6KRgflXxHPdiGgNuRL5CSpyCSjZB4qYudjD9zsLGN0/ioemJNCtbQunY4kENBW5BISKKi8vfrKNZz7KJrxZGE9eNZgrhmjIlUhtqMjFcam7jnLX4k1k7itiQmIX7p80iMhWzZyOJeIaKnJxTFlFFU+u28JLn+UQ2aoZf7x+GGMHneo9
TETkdFTk4ohvc/JJXpLK9kPFXD28B7MvG0BECw25EjkTKnJpUEVlFTz6XiZ/+WYnPdq34M1bRnJun0inY4m4mopcGszHmQe4e2kq+wrL+OV5sfzu0n6EN9WXoIiv9F0k9e5wcTkPrExn2Q976NuxFSl3jGJodDunY4kEDRW51BtrLas8e5m3Ip2jpRX85uK+/PrC3jRrrCFXIv6kIpd6sb+wjDlL01i3eT+J3SP4yy0jGdCljdOxRIKSilz8ylrL39bn8dCazZRXeplz2QBuPjdGQ65E6pGKXPxmZ34JyUs8fLUtn5Gx7Xl0WiIxkS2djiUS9FTk4rMqr+W1L7fz+NosGjdqxMNTErh6eA8NuRJpID4VuTFmPjAZ8AIHgJustXv8EUzcIWtfETNTPGzKK+CiuI48NCWeLhEaciXSkHw9I3/MWnsPgDHmN8C9wO0+p5KAV17p5flPsnnu42xaN2/C01cPYdLgrhpyJeIAn4rcWlt40sOWgPUtjrjBprwCZi72kLW/iMlDunLvxIF00JArEcf4fI3cGPMQcANwFLjwNK+bAcwAiI6O9nVZcUBpeRV/+CCLV77YTsfWzXn5hiTGDOzkdCyRkGesPf1JtDFmHXCqkXRzrLXLT3rdbKC5tfa+mhZNSkqyGzZsqGtWcdDX2/JJXuJhR34J146MJnl8HG2aa8iVSEMyxmy01ib9+Pkaz8ittWNqucZbwGqgxiIX9ygsq+CRNZn89bud9OwQzlu3jmRUbw25Egkkvt610tdau/XEw0lApu+RJFCsy9jPnGWpHCw6zozze/HbMf1o0VTb60UCja/XyBcYY/pTffvhDnTHSlDIP3ac+1dmsGLTHuI6t2bR9UkM7tHW6Vgi8hN8vWtlmr+CiPOstazYtId5K9I5dryS347pxx2je9O0sbbXiwQy7ewUAPYeLWXu0jQ+zDzAkB5tWfjzRPp1au10LBGpBRV5iPN6LX9dv5NH1mRS6fUyd8IAbj43ljBtrxdxDRV5CNt+qJjkFA/fbj/MqN4dWDA1kegO4U7HEpE6UpGHoMoqL69+uZ0n1m6haVgjFkxN4KrhPbS9XsSlVOQhZvPeQmalePDsOsqYAZ148Ip4Okc0dzqWiPhARR4ijldW8dzH23j+42wiWjTh2WvPYkJCF52FiwQBFXkI+H7nEWYt9rD1wDGmnNWNeycOpF3Lpk7HEhE/UZEHsZLySp5Yu4VXv9xO5zbNee2m4VwY19HpWCLiZyryIPVl9iGSl3jIO1zKL86OZta4OFpryJVIUFKRB5mjpRU8vHozf9uQR2xkS/4242xG9urgdCwRqUcq8iCyNn0fc5elkV9czu0X9Oa/x/SleRMNuRIJdiryIHCw6DjzVqaz2rOXAV3a8MqNw0noHuF0LBFpICpyF7PWsvQfu3lgVQYlx6v4/aX9uO2C3jQJ05ArkVCiInep3QWlzFmayidZBxkaXT3kqk9HDbkSCUUqcpfxei1vfruDBe9m4rVw3+UDueGcGA25EglhKnIXyTl4jOSUVL7LPcx5fSJ5ZGoCPdpryJVIqFORu0BllZeXPt/Ok+u20LxxIxb+PJErh3XX9noRAVTkAS9jTyEzUzaRtruQsYM6MX9yPB3baMiViPyLijxAlVVU8exH2bz46TbahjflheuGMj6hi9OxRCQAqcgD0MYdh5m52MO2g8VMG9qdeyYOoG24hlyJyKmpyANI8fFKHns/iz99nUvXiBb8afoILugX5XQsEQlwfilyY8zvgceAKGvtIX8cM9R8tuUgs5eksudoKTec3ZO7xsXRqpn+nRWRmvncFMaYHsAlwE7f44SeoyUVzF+dweKNu+gV1ZK/33YOw2PaOx1LRFzEH6d8TwIzgeV+OFZIeS9tL/csT+dwcTm/Gt2b31ysIVciUnc+FbkxZhKw21q7qaZ7mo0xM4AZANHR0b4s63oHisq4b3k676btY2CXNrx203Diu2nIlYicmRqL3BizDuh8ig/N
Ae4GLq3NQtbaRcAigKSkJFuHjEHDWsvijbt4cPVmSiuquGtsf2ac30tDrkTEJzUWubV2zKmeN8YkALHA/z8b7w58b4wZYa3d59eUQSDvcAl3L03l862HSOrZjgXTEunTsZXTsUQkCJzxpRVrbSrwzzeANMbkAkm6a+Xfeb2WN77OZeH7WRjggcmD+MXInjTSkCsR8RPd31aPsg8cIznFw4YdRzi/XxQPT4mnezsNuRIR//JbkVtrY/x1LLerqPKy6LMcnl63lRZNw3jiysFMHdpNQ65EpF7ojNzP0nYfZeZiDxl7C7ksoTP3T4onqnUzp2OJSBBTkftJWUUVT3+4lUWf5dC+ZVNe/MVQxsVryJWI1D8VuR+szz3MrMUecg4Vc+Ww7sydMJCI8CZOxxKREKEi98Gx45UsfC+TN77eQfd2LfjzL0fws74aciUiDUtFfoY+yTrAnKVp7Dlays3nxvD7S/vTUkOuRMQBap46OlJczvzVGSz5fjd9OrZi8e2jGNazndOxRCSEqchryVrLmtR93LcijYKSCv7vRX2486I+NGusIVci4iwVeS0cKCxj7rI01mbsJ6FbBG9MH8nArm2cjiUiAqjIT8tayzsbdjF/dQbllV6Sx8dxy3mxNNaQKxEJICryn5B3uITZS1L5IvsQI2Lbs2BqAr2iNORKRAKPivxHqryWP32Vy2PvZxHWyPDgFfFcOyJaQ65EJGCpyE+ydX8RM1M8/GNnAaP7R/HwlAS6tm3hdCwRkdNSkQPllV5e/HQbz36UTctmYTx11RAmD+mqIVci4gohX+SeXQXMXOwhc18RExO7MG/SICJbaciViLhHyBZ5WUUVT36whZc+zyGqdTMWXT+MSwed6h3tREQCW0gW+Tc5+SSneMjNL+GaET1IHj+AiBYaciUi7hRSRV5UVsGCdzN589udRLcP561bRjKqT6TTsUREfBIyRf5R5n7mLE1jf2EZt5wXy/9c2o/wpiHzxxeRIBb0TXa4uJwHVqaz7Ic99O3YiufvGMVZ0RpyJSLBI2iL3FrLSs9e5q1Ip7C0gv+6uC+/urC3hlyJSNAJyiLfd7R6yNW6zfsZ3D2CR28dSVxnDbkSkeDkU5EbY+YBtwIHTzx1t7V2ja+hzpS1lrfX5/Hw6s1UeL3MuWwA08+LJUzb60UkiPnjjPxJa+3jfjiOT3bkF5OcksrXOfmc3as9C6YmEhPZ0ulYIiL1zvWXVqq8lte+3M7ja7No0qgRD09J4OrhPTTkSkRChj+K/E5jzA3ABuB31tojfjhmrWTtqx5ytSmvgIvjOvLglHi6RGjIlYiElhqL3BizDjjV3vU5wAvAfMCe+P0JYPpPHGcGMAMgOjr6DONWK6/08vwn2Tz3cTatmzfh6auHMGmwhlyJSGgy1lr/HMiYGGCVtTa+ptcmJSXZDRs2nNE6P+QVMGuxh6z9RUwe0pV7Jw6kg4ZciUgIMMZstNYm/fh5X+9a6WKt3Xvi4RQgzZfj1eSZD7fy5LotdGzdnFduTOLiAZ3qczkREVfw9Rr5QmPMEKovreQCt/ka6HSiO4Rz9YhoksfH0aa5hlyJiICPRW6tvd5fQWpj8pBuTB7SrSGXFBEJeHo7eBERl1ORi4i4nIpcRMTlVOQiIi6nIhcRcTkVuYiIy6nIRURcTkUuIuJyfpu1UqdFjTkI7KjjfxYJHKqHOA1B2Z3j5vxuzg7uzh+o2Xtaa6N+/KQjRX4mjDEbTjUsxg2U3Tluzu/m7ODu/G7LrksrIiIupyIXEXE5NxX5IqcD+EDZnePm/G7ODu7O76rsrrlGLiIip+amM3IRETkFFbmIiMu5psiNMfOMMbuNMT+c+HWZ05nOhDHm98YYa4yJdDpLbRlj5htjPCc+72uNMV2dzlQXxpjHjDGZJ/4MS40xbZ3OVFvGmCuNMenGGK8xxhW3wxljxhljsowx2caYZKfz1IUx5lVjzAFjTL2+baW/uabIT3jSWjvkxK81ToepK2NM
D+ASYKfTWeroMWttorV2CLAKuNfhPHX1ARBvrU0EtgCzHc5TF2nAVOAzp4PUhjEmDHgOGA8MBK4xxgx0NlWdvA6MczpEXbmtyN3uSWAm1e9x6hrW2sKTHrbEffnXWmsrTzz8BujuZJ66sNZuttZmOZ2jDkYA2dbaHGttOfA2MNnhTLVmrf0MOOx0jrpyW5HfeeLH41eNMe2cDlMXxphJwG5r7Sans5wJY8xDxpg84Drcd0Z+sunAu06HCGLdgLyTHu868ZzUI5/efNnfjDHrgM6n+NAc4AVgPtVng/OBJ6j+pgwYNeS/G7i0YRPV3umyW2uXW2vnAHOMMbOBO4H7GjRgDWrKf+I1c4BK4M2GzFaT2mR3EXOK51z1E5wbBVSRW2vH1OZ1xpiXqL5WG1B+Kr8xJgGIBTYZY6D6R/vvjTEjrLX7GjDiT6rt5x54C1hNgBV5TfmNMTcCE4GLbYBtnqjD594NdgE9TnrcHdjjUJaQ4ZpLK8aYLic9nEL1/wRyBWttqrW2o7U2xlobQ/UX+9BAKfGaGGP6nvRwEpDpVJYzYYwZB8wCJllrS5zOE+TWA32NMbHGmKbA1cAKhzMFPdfs7DTG/BkYQvWPabnAbdbavU5mOlPGmFwgyVobiGMy/4MxJgXoD3ipHj98u7V2t7Opas8Ykw00A/JPPPWNtfZ2ByPVmjFmCvAMEAUUAD9Ya8c6GqoGJ24NfgoIA1611j7kbKLaM8b8FRhN9Rjb/cB91tpXHA1VC64pchEROTXXXFoREZFTU5GLiLicilxExOVU5CIiLqciFxFxORW5iIjLqchFRFzu/wGvnc7Y9bquWwAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "import matplotlib.pyplot as plt\n",
    "plt.plot(X_train_D[0],X_train_D[1])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "id": "b7b03940",
   "metadata": {},
   "outputs": [],
   "source": [
    "encoder.save_weights(\"Bottleneck.ckpt\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "c049bbb6",
   "metadata": {},
   "outputs": [],
   "source": [
    "data = np.concatenate((X_train_D,Y_train),axis=1)\n",
    "dataT = np.concatenate((X_test_D,Y_test),axis=1)\n",
    "data = np.concatenate((data,dataT),axis=0)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "6a168a89",
   "metadata": {},
   "outputs": [],
   "source": [
    "data = pd.DataFrame(data)\n",
    "data.to_csv('AutoDecoData.csv')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "0404dc4c",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Wide & Deep model: linear part on crossed features, dense MLP on raw inputs.\n",
    "lst = list(product([0, 1], repeat=7))\n",
    "linear_X = cross_transformation(X_train,lst[5])\n",
    "linear_Xt = cross_transformation(X_test,lst[5])\n",
    "linear_model = LinearModel()\n",
    "dnn_model = Sequential([Dense(520,'relu'),\n",
    "                        BatchNormalization(),\n",
    "                        Dense(520,'relu'),\n",
    "                        BatchNormalization(),\n",
    "                        Dense(1040,'relu'), \n",
    "                        Dense(units=Y_train.shape[1],activation='linear')])\n",
    "\n",
    "combined_model = WideDeepModel(linear_model, dnn_model)\n",
    "\n",
    "# NOTE(review): an ExponentialDecay schedule was previously defined here but\n",
    "# never passed to the optimizer, so it had no effect and has been removed.\n",
    "# If decay is wanted, pass the schedule as learning_rate to Adam.\n",
    "optimizer = tensorflow.keras.optimizers.Adam(learning_rate=0.01)\n",
    "combined_model.compile(optimizer,'mse',metrics=[RootMeanSquaredError()])\n",
    "combined_model.fit([linear_X,X_train], Y_train, 128,epochs=1,shuffle=True,validation_data=([linear_Xt,X_test], Y_test))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "28def9cd",
   "metadata": {},
   "outputs": [],
   "source": [
    "Y_pred = combined_model.predict([linear_Xt,X_test])\n",
    "u = np.sum(np.square(Y_test-Y_pred))\n",
    "v = np.sum(np.square(Y_test - np.mean(Y_test)))\n",
    "R = 1 - u/v"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "6e4c0c2a",
   "metadata": {},
   "outputs": [],
   "source": [
    "R"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b40d42d1",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Un-scale targets and predictions back to original units, then report RMSE\n",
    "# on the output columns (everything from column 7 onward).\n",
    "# (A redundant `import math` was removed: math is already imported in the\n",
    "# top import cell and is not used in this cell.)\n",
    "true = inverseTransform(scaler,X_test,Y_test)\n",
    "pred = inverseTransform(scaler,X_test,Y_pred)\n",
    "\n",
    "error_tr = rmse(true[:,7:],pred[:,7:])\n",
    "error_tr"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "d46d85b1",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Maximum absolute prediction error over all samples and outputs.\n",
    "# Vectorized equivalent of the row-wise max([max(abs(i)) for i in Y_test-Y_pred]).\n",
    "np.abs(Y_test - Y_pred).max()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b2959e1c",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "8d29f317",
   "metadata": {},
   "outputs": [],
   "source": [
    "from sklearn.metrics import r2_score\n",
    "r2_score(Y_test, Y_pred,multioutput='variance_weighted')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "345f604c",
   "metadata": {},
   "outputs": [],
   "source": [
    "dnn_model.summary()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "1afb2aee",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Deep model variant: three stacked LSTMs followed by three 1-D convolutions.\n",
    "lst = list(product([0, 1], repeat=7))\n",
    "linear_X = cross_transformation(X_train,lst[5])\n",
    "linear_Xt = cross_transformation(X_test,lst[5])\n",
    "linear_model = LinearModel()\n",
    "dnn_model = Sequential([LSTM(60,return_sequences=True,input_shape=(1,X_trainD.shape[-1])),\n",
    "                        LSTM(60,return_sequences=True),\n",
    "                        LSTM(60,return_sequences=True),\n",
    "                        Conv1D(60,kernel_size=3,padding='same'),\n",
    "                        Conv1D(60,kernel_size=3,padding='same'),\n",
    "                        Conv1D(60,kernel_size=3,padding='same'),\n",
    "                        Flatten(),\n",
    "                        Dense(units=Y_train.shape[1],activation='relu')])\n",
    "\n",
    "# NOTE(review): combined_model is constructed but never compiled or trained\n",
    "# in this cell -- only dnn_model is fit below. Confirm whether the wide&deep\n",
    "# variant was intended here, as in the earlier WideDeepModel cell.\n",
    "combined_model = WideDeepModel(linear_model, dnn_model)\n",
    "\n",
    "# `lr` is deprecated in tf.keras optimizers; use `learning_rate`.\n",
    "# (An unused second optimizer `optD` was removed.)\n",
    "opt = Adam(learning_rate=0.001)\n",
    "# NOTE(review): X_trainD_D / X_testD_D are not defined in any visible cell --\n",
    "# presumably the (samples, 1, features) reshapes of X_train_D / X_test_D that\n",
    "# the LSTM input_shape above expects. Confirm the variable names.\n",
    "dnn_model.compile(opt,'mse',metrics=['accuracy',RootMeanSquaredError()])\n",
    "dnn_model.fit(X_trainD_D, Y_train, 64,epochs=100,validation_data=(X_testD_D, Y_test))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "61b0f48e",
   "metadata": {},
   "outputs": [],
   "source": [
    "import matplotlib.pyplot as plt\n",
    "fig = plt.figure(figsize=(10, 10))\n",
    "ax = fig.add_subplot(111, projection='3d')\n",
    "ax.scatter3D(X_train_D[:,0],X_train_D[:,1],X_train_D[:,2])\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "9b763143",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Load the static-model dataset (with quaternions) and rebuild the\n",
    "# train/test split. Dead commented-out np.unique de-duplication removed.\n",
    "# NOTE(review): `data` was previously a concatenated ndarray (AutoDecoData\n",
    "# cell); reusing the name for a DataFrame here is fragile across re-runs.\n",
    "data = pd.read_csv('Dataset/Static_Model/15000DwithQuat.csv')\n",
    "dataS = data.drop('Unnamed: 0',axis=1)\n",
    "X = dataS.iloc[:,:7].values\n",
    "X[:,:3] = X[:,:3]*1000  # scale first three columns by 1000 (presumably m -> mm; confirm units)\n",
    "Y = dataS.iloc[:,7:].values\n",
    "X_train,X_test,Y_train,Y_test = train_test_split(X,Y,test_size=0.3,random_state=0)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "c07a11a7",
   "metadata": {},
   "outputs": [],
   "source": [
    "dataS[['0','1','2']] = dataS[['0','1','2']]*1000"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "0bf869e9",
   "metadata": {},
   "outputs": [],
   "source": [
    "dataS"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "f8da147a",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.5"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
