import datetime

from keras.layers import LSTM, Dense
from numpy import concatenate
from pandas import read_csv, DataFrame
from sklearn.metrics import mean_squared_error, mean_absolute_error
from sklearn.preprocessing import MinMaxScaler
from tensorflow.python.keras.callbacks import EarlyStopping
from tensorflow.python.keras.models import Sequential, load_model
import numpy as np
from tensorflow.python.keras.utils.np_utils import to_categorical
import keras
from util import TIME_STEP, series_to_supervised, draw_data


# Multi-step prediction implementation
def bi_lstm():
    """Train and evaluate a Bi-LSTM model for airport inbound passenger flow.

    Frames the 2019 multi-source dataset as a supervised problem
    (TIME_STEP past time slices -> next ``in_flow`` value), trains a
    bidirectional LSTM, rescales predictions back to original units,
    prints test MSE/MAE and saves a comparison plot.
    """
    # To switch between the 2019 and 2020 data, change only the next two lines.
    multi_dataset = read_csv('./data/2019allday.csv', header=0, index_col=0)
    day_num = 26  # number of days covered by the data
    # multi_dataset = read_csv('./data/多源数据总表.csv', header=0, index_col=None)
    # day_num = 31  # number of days covered by the data

    dataset = DataFrame()
    # Inputs: in_flow (the prediction target), actual/scheduled arrival and
    # departure passenger counts, time slot, weather and workday flags.
    dataset['in_flow'] = multi_dataset['in_flow']
    # dataset['out_flow'] = multi_dataset['out_flow']
    dataset['arr_ALDT_passenger'] = multi_dataset['arr_ALDT_passenger']
    dataset['arr_SIBT_passenger'] = multi_dataset['arr_SIBT_passenger']
    dataset['dep_ATOT_passenger'] = multi_dataset['dep_ATOT_passenger']
    dataset['dep_SOBT_passenger'] = multi_dataset['dep_SOBT_passenger']
    dataset['hour'] = multi_dataset['hour']
    dataset['weather'] = multi_dataset['weather']
    dataset['workday'] = multi_dataset['workday']
    dataset.fillna(0, inplace=True)

    PARAMETER_NUM = dataset.shape[1]  # number of feature columns used
    # Ensure all data is float.
    values = dataset.values.astype('float32')

    # Normalize features to [0, 1].
    scaler = MinMaxScaler(feature_range=(0, 1))
    values = scaler.fit_transform(values)

    # Frame as supervised learning: TIME_STEP past steps predict the next one.
    reframed = series_to_supervised(values, TIME_STEP, 1)
    # Drop the current-step columns we do not want to predict, keeping only
    # the in_flow column of step t as the label.
    reframed.drop(reframed.columns[[-1, -2, -3, -4, -5, -6, -7]], axis=1, inplace=True)

    # Split into training and test sets.
    values = reframed.values
    n_train_time_slice = (day_num - 1) * 100  # assumes 100 time slices per day -- TODO confirm
    train = values[:n_train_time_slice, :]
    test = values[n_train_time_slice:, :]
    # Split into inputs and label (label is the last column).
    train_X, train_y = train[:, :-1], train[:, -1]
    test_X, test_y = test[:, :-1], test[:, -1]
    # Reshape into 3D [samples, time steps, features].
    train_X = train_X.reshape((train_X.shape[0], TIME_STEP, train_X.shape[1] // TIME_STEP))
    test_X = test_X.reshape((test_X.shape[0], TIME_STEP, test_X.shape[1] // TIME_STEP))

    # Design the network. NOTE: input_shape belongs on the outermost layer
    # (the Bidirectional wrapper), not on the wrapped LSTM.
    model = Sequential()
    model.add(keras.layers.Bidirectional(keras.layers.LSTM(units=20),
                                         input_shape=(train_X.shape[1], train_X.shape[2])))
    model.add(Dense(1))
    model.compile(loss='mse', optimizer='adam')
    # Fit the network, stopping early when validation loss stalls.
    early_stopping = EarlyStopping(monitor='val_loss', patience=20, verbose=2)
    model.fit(train_X, train_y, epochs=1000, batch_size=128,
              validation_data=(test_X, test_y),
              callbacks=[early_stopping], verbose=2,
              shuffle=False)

    # Make predictions (single call; the original predicted twice and
    # discarded the first result).
    yhat = model.predict(test_X, verbose=1)
    yhat = np.array(yhat).reshape(yhat.shape[0], -1)

    # Invert scaling for the forecast. Column 0 carries the prediction; the
    # remaining columns only pad the matrix to the scaler's expected width
    # so inverse_transform can be applied (each column is scaled independently).
    test_X = test_X.reshape((test_X.shape[0], -1))
    inv_yhat = concatenate((yhat, test_X[:, 1:PARAMETER_NUM]), axis=1)
    inv_yhat = scaler.inverse_transform(inv_yhat)[:, 0]
    # Invert scaling for the actual values the same way.
    test_y = test_y.reshape((len(test_y), 1))
    inv_y = concatenate((test_y, test_X[:, 1:PARAMETER_NUM]), axis=1)
    inv_y = scaler.inverse_transform(inv_y)[:, 0]
    np.save("test", inv_y)
    np.save("lstm", inv_yhat)

    # Report error metrics and plot predicted vs. actual flow.
    mse = mean_squared_error(inv_y, inv_yhat)
    mae = mean_absolute_error(inv_y, inv_yhat)
    print('Test MSE: %.3f' % mse)
    print('Test MAE: %.3f' % mae)
    pic_path = "./picture/lstm.png"
    pic_title = "Bi-LSTM\n MSE=%.3f" % mse
    draw_data(inv_yhat, inv_y, pic_path, pic_title)

def predict_inflow_data():
    """Predict ``in_flow`` on the test split using the saved Bi-LSTM model.

    Rebuilds the same preprocessing pipeline as training, loads the model
    from ``./model/inflow.h5`` and rescales both predictions and actuals
    back to original units.

    Returns:
        tuple: ``(inv_y, inv_yhat)`` -- actual and predicted in_flow
        arrays in original units. (The original computed these values and
        discarded them; returning them is backward-compatible.)
    """
    # Load the data.
    multi_dataset = read_csv('./data/2019allday.csv', header=0, index_col=0)
    day_num = 26  # number of days covered by the data
    dataset = DataFrame()
    # Same feature set as used during training.
    dataset['in_flow'] = multi_dataset['in_flow']
    dataset['arr_ALDT_passenger'] = multi_dataset['arr_ALDT_passenger']
    dataset['arr_SIBT_passenger'] = multi_dataset['arr_SIBT_passenger']
    dataset['dep_ATOT_passenger'] = multi_dataset['dep_ATOT_passenger']
    dataset['dep_SOBT_passenger'] = multi_dataset['dep_SOBT_passenger']
    dataset['hour'] = multi_dataset['hour']
    dataset['weather'] = multi_dataset['weather']
    dataset['workday'] = multi_dataset['workday']
    dataset.fillna(0, inplace=True)

    PARAMETER_NUM = dataset.shape[1]  # number of feature columns used
    # Ensure all data is float.
    values = dataset.values.astype('float32')

    # Normalize features to [0, 1].
    scaler = MinMaxScaler(feature_range=(0, 1))
    values = scaler.fit_transform(values)

    # Frame as supervised learning: TIME_STEP past steps predict the next one.
    reframed = series_to_supervised(values, TIME_STEP, 1)
    # Drop the current-step columns we do not want to predict.
    reframed.drop(reframed.columns[[-1, -2, -3, -4, -5, -6, -7]], axis=1, inplace=True)

    # Split into training and test sets (only the test part is used here).
    values = reframed.values
    n_train_time_slice = (day_num - 1) * 100
    test = values[n_train_time_slice:, :]
    # Split into inputs and label.
    test_X, test_y = test[:, :-1], test[:, -1]
    # Reshape into 3D [samples, time steps, features].
    test_X = test_X.reshape((test_X.shape[0], TIME_STEP, test_X.shape[1] // TIME_STEP))

    # Load the previously trained model and predict (single call; the
    # original predicted twice and discarded the first result).
    model = load_model("./model/inflow.h5")
    yhat = model.predict(test_X, verbose=1)
    yhat = np.array(yhat).reshape(yhat.shape[0], -1)

    # Invert scaling for the forecast: column 0 carries the prediction; the
    # other columns only pad the matrix to the scaler's expected width.
    test_X = test_X.reshape((test_X.shape[0], -1))
    inv_yhat = concatenate((yhat, test_X[:, 1:PARAMETER_NUM]), axis=1)
    inv_yhat = scaler.inverse_transform(inv_yhat)[:, 0]
    # Invert scaling for the actual values the same way.
    test_y = test_y.reshape((len(test_y), 1))
    inv_y = concatenate((test_y, test_X[:, 1:PARAMETER_NUM]), axis=1)
    inv_y = scaler.inverse_transform(inv_y)[:, 0]
    return inv_y, inv_yhat


def train_and_save_inflow_model():
    """Train the Bi-LSTM ``in_flow`` model and save it to ./model/inflow.h5.

    Uses the same preprocessing pipeline as :func:`bi_lstm`: loads the 2019
    multi-source dataset, scales it to [0, 1], frames it as a supervised
    problem (TIME_STEP past slices -> next in_flow) and fits a
    bidirectional LSTM with early stopping on validation loss.
    """
    multi_dataset = read_csv('./data/2019allday.csv', header=0, index_col=0)
    day_num = 26  # number of days covered by the data

    dataset = DataFrame()
    # Inputs: in_flow (target), actual/scheduled arrival and departure
    # passenger counts, time slot, weather and workday flags.
    dataset['in_flow'] = multi_dataset['in_flow']
    dataset['arr_ALDT_passenger'] = multi_dataset['arr_ALDT_passenger']
    dataset['arr_SIBT_passenger'] = multi_dataset['arr_SIBT_passenger']
    dataset['dep_ATOT_passenger'] = multi_dataset['dep_ATOT_passenger']
    dataset['dep_SOBT_passenger'] = multi_dataset['dep_SOBT_passenger']
    dataset['hour'] = multi_dataset['hour']
    dataset['weather'] = multi_dataset['weather']
    dataset['workday'] = multi_dataset['workday']
    dataset.fillna(0, inplace=True)

    # Ensure all data is float.
    values = dataset.values.astype('float32')

    # Normalize features to [0, 1].
    scaler = MinMaxScaler(feature_range=(0, 1))
    values = scaler.fit_transform(values)

    # Frame as supervised learning: TIME_STEP past steps predict the next one.
    reframed = series_to_supervised(values, TIME_STEP, 1)
    # Drop the current-step columns we do not want to predict.
    reframed.drop(reframed.columns[[-1, -2, -3, -4, -5, -6, -7]], axis=1, inplace=True)

    # Split into training and test sets.
    values = reframed.values
    n_train_time_slice = (day_num - 1) * 100
    train = values[:n_train_time_slice, :]
    test = values[n_train_time_slice:, :]
    # Split into inputs and label.
    train_X, train_y = train[:, :-1], train[:, -1]
    test_X, test_y = test[:, :-1], test[:, -1]
    # Reshape into 3D [samples, time steps, features].
    train_X = train_X.reshape((train_X.shape[0], TIME_STEP, train_X.shape[1] // TIME_STEP))
    test_X = test_X.reshape((test_X.shape[0], TIME_STEP, test_X.shape[1] // TIME_STEP))

    # Design the network. NOTE: input_shape belongs on the outermost layer
    # (the Bidirectional wrapper), not on the wrapped LSTM.
    model = Sequential()
    model.add(keras.layers.Bidirectional(keras.layers.LSTM(units=20),
                                         input_shape=(train_X.shape[1], train_X.shape[2])))
    model.add(Dense(1))
    model.compile(loss='mse', optimizer='adam')
    # Fit the network, stopping early when validation loss stalls.
    early_stopping = EarlyStopping(monitor='val_loss', patience=20, verbose=2)
    model.fit(train_X, train_y, epochs=1000, batch_size=128,
              validation_data=(test_X, test_y),
              callbacks=[early_stopping], verbose=2,
              shuffle=False)
    model.save("./model/inflow.h5")

if __name__ == '__main__':

    # NOTE(review): placeholder no-op; nothing runs when this script is
    # executed. Call bi_lstm(), train_and_save_inflow_model() or
    # predict_inflow_data() here to actually do something.
    lss = 1