import  DataGodotif as DG
from TrainTestPartition import train_X,test_X,test_Y,train_Y,scaler
from math import sqrt
from numpy import concatenate
from sklearn.metrics import mean_squared_error
from keras.models import  Sequential
from keras.layers import Dense
from keras.layers import LSTM
from matplotlib import pyplot
# Single-layer LSTM regressor: 50 recurrent units feeding a 2-unit
# linear head (two target variables predicted per sample).
model = Sequential()
model.add(LSTM(50, input_shape=(train_X.shape[1], train_X.shape[2])))
model.add(Dense(2))
model.compile(optimizer='adam', loss='mae')

# Fit the network. shuffle=False keeps the temporal ordering of the
# training sequences intact; verbose=2 prints one line per epoch.
history = model.fit(
    train_X,
    train_Y,
    epochs=50,
    batch_size=72,
    validation_data=(test_X, test_Y),
    shuffle=False,
    verbose=2,
)

# Visualize training vs. validation loss per epoch to spot over/underfitting.
for series, tag in (('loss', 'train'), ('val_loss', 'test')):
    pyplot.plot(history.history[series], label=tag)
pyplot.legend()
pyplot.show()


# make a prediction on the held-out set
yhat = model.predict(test_X)
# flatten the (samples, timesteps, features) input back to 2-D so it can be
# re-attached to the predictions for inverse scaling
test_X = test_X.reshape((test_X.shape[0], test_X.shape[2]))

# invert scaling for forecast: the scaler was fit on the full feature width,
# so pad the 2 predicted columns with the remaining input features before
# inverse-transforming, then keep only the 2 target columns
inv_yhat = concatenate((yhat, test_X[:, 2:]), axis=1)
inv_yhat = scaler.inverse_transform(inv_yhat)
inv_yhat = inv_yhat[:, :2]

# invert scaling for actual values the same way
test_Y = test_Y.reshape((len(test_Y), 2))
inv_y = concatenate((test_Y, test_X[:, 2:]), axis=1)
inv_y = scaler.inverse_transform(inv_y)
# keep BOTH target columns (was [:, 0], which mismatched inv_yhat's 2 columns
# and the 2-unit output layer)
inv_y = inv_y[:, :2]

# calculate RMSE in the ORIGINAL units: compare the inverse-transformed
# actuals and forecasts (previously compared the still-scaled yhat/test_Y,
# making inv_y/inv_yhat dead code and the reported RMSE meaningless)
rmse = sqrt(mean_squared_error(inv_y, inv_yhat))
print('Test RMSE: %.3f' % rmse)

