from pandas import DataFrame
from pandas import Series
from pandas import concat
from pandas import read_csv
# from pandas import datetime
from sklearn.metrics import mean_squared_error
from sklearn.preprocessing import MinMaxScaler
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import LSTM
from math import sqrt
from matplotlib import pyplot
import numpy


def timeseries_to_supervised(data, lag=1):
    """Frame a time series as a supervised-learning dataset.

    Each of the ``lag`` shifted copies of the series becomes an input
    column (shift(1) first, then shift(2), ...), with the unshifted
    series appended as the final (target) column.  NaNs introduced by
    shifting are replaced with 0.

    :param data: sequence or array-like time series
    :param lag: number of lagged input columns (time steps)
    :return: DataFrame of lag input columns followed by the target column
    """
    frame = DataFrame(data)
    cols = [frame.shift(step) for step in range(1, lag + 1)] + [frame]
    supervised = concat(cols, axis=1)
    return supervised.fillna(0)


def difference(dataset, interval=1):
    """Difference the series to help make it stationary.

    Each output element is ``dataset[i] - dataset[i - interval]``, so the
    result is ``interval`` elements shorter than the input.

    :param dataset: indexable sequence of values
    :param interval: lag between subtracted elements
    :return: Series of differenced values
    """
    deltas = [dataset[idx] - dataset[idx - interval]
              for idx in range(interval, len(dataset))]
    return Series(deltas)


def inverse_difference(history, yhat, interval=1):
    """Invert a differencing operation for a single forecast value.

    Adds back the observation ``interval`` steps from the end of
    ``history`` so the differenced prediction returns to the original
    scale.

    :param history: sequence of original (undifferenced) observations
    :param yhat: differenced prediction to invert
    :param interval: lag that was used when differencing
    :return: the prediction on the original scale
    """
    base = history[-interval]
    return yhat + base


def scale(train, test):
    """Scale train/test data to [-1, 1] to match the LSTM's tanh activation.

    The scaler is fitted on the training data only, so no information
    from the test set leaks into the transform.

    :param train: training data, 1-D or 2-D numpy array
    :param test: test data, 1-D or 2-D numpy array
    :return: (fitted scaler, scaled train array, scaled test array)
    """
    # MinMaxScaler requires 2-D input.  The original code's
    # reshape(x.shape[0], x.shape[1]) was a no-op for 2-D arrays and
    # raised IndexError for 1-D ones; promote 1-D input instead.
    if train.ndim == 1:
        train = train.reshape(-1, 1)
    if test.ndim == 1:
        test = test.reshape(-1, 1)
    # Fit on the training data only to avoid test-set leakage.
    scaler = MinMaxScaler(feature_range=(-1, 1))
    scaler = scaler.fit(train)
    train_scaled = scaler.transform(train)
    test_scaled = scaler.transform(test)
    return scaler, train_scaled, test_scaled


def invert_scale(scaler, X, value):
    """Invert the MinMax scaling for a single forecast value.

    Rebuilds a full scaled row (inputs plus the predicted target),
    inverse-transforms it, and returns the last element — the
    prediction on the original scale.

    :param scaler: fitted scaler used on the training data
    :param X: scaled input features of the row (without the target)
    :param value: scaled prediction to invert
    :return: the prediction on the original scale
    """
    row = numpy.array(list(X) + [value]).reshape(1, -1)
    restored = scaler.inverse_transform(row)
    return restored[0, -1]


def fit_lstm(train, batch_size, nb_epoch, neurons):
    """Train and return a stateful LSTM model.

    Takes a training dataset in supervised-learning format (inputs in
    all but the last column, target in the last), plus the batch size,
    number of epochs and number of LSTM units.

    :param train: 2-D array, supervised format (features..., target)
    :param batch_size: fixed batch size required by the stateful LSTM
    :param nb_epoch: number of training epochs
    :param neurons: number of LSTM units
    :return: the fitted Keras model
    """
    features, target = train[:, 0:-1], train[:, -1]
    # Reshape to [samples, time steps, features] as the LSTM expects.
    features = features.reshape(features.shape[0], 1, features.shape[1])
    model = Sequential()
    # stateful=True keeps the LSTM state across batches within an epoch.
    model.add(LSTM(neurons,
                   batch_input_shape=(batch_size, features.shape[1], features.shape[2]),
                   stateful=True))
    model.add(Dense(1))
    model.compile(loss='mean_squared_error', optimizer='adam')
    # Manual epoch loop: fit one pass without shuffling, then clear the
    # internal state between epochs.
    for _ in range(nb_epoch):
        model.fit(features, target, epochs=1, batch_size=batch_size,
                  verbose=1, shuffle=False)
        model.reset_states()
    return model


def forecast_lstm(model, batch_size, X):
    """Make a one-step forecast with a fitted LSTM model.

    :param model: fitted Keras model
    :param batch_size: batch size the model was trained with
    :param X: 1-D array of input features for a single sample
    :return: the scalar forecast value
    """
    # Single sample, single time step, len(X) features.
    sample = X.reshape(1, 1, len(X))
    prediction = model.predict(sample, batch_size=batch_size)
    return prediction[0, 0]

if __name__ == '__main__':

    # Raw string: the original non-raw path relied on '\l', '\c', '\R',
    # '\S' not being escape sequences (and mixed in one '\\'), which
    # emits invalid-escape DeprecationWarnings.  This raw string yields
    # the identical path.
    series = read_csv(
        r'E:\lyf_ML_Drought\coding\ML_Drought_Prediction\indices_caculate\result\ROW_SPEI\ROW_SPEI-12\SPEI-12_52533.txt',
        header=None, names=('TIME', 'SPEI-12'))
    series = series.set_index(['TIME'], drop=True)  # use the date as the index

    raw_values = series.values
    # Differencing helps make the series stationary.
    diff_values = difference(raw_values, 1)

    # Frame the differenced series as a supervised-learning problem.
    supervised = timeseries_to_supervised(diff_values, 1)
    supervised_values = supervised.values

    # Hold out the last 12 observations for testing.
    train, test = supervised_values[0:-12], supervised_values[-12:]

    # Scale to [-1, 1] to match the LSTM's activation range.
    scaler, train_scaled, test_scaled = scale(train, test)

    # Repeat the experiment: each run differs, so average over repeats.
    repeats = 10
    error_scores = list()
    for r in range(repeats):
        lstm_model = fit_lstm(train_scaled, 1, 3000, 50)  # 1500 epochs / 1 neuron may work better?
        # Forecast the whole training set to build up the prediction state.
        train_reshaped = train_scaled[:, 0].reshape(len(train_scaled), 1, 1)
        lstm_model.predict(train_reshaped, batch_size=1)
        # Walk-forward validation over the test data.
        predictions = list()
        for i in range(len(test_scaled)):
            # One-step forecast.
            X, y = test_scaled[i, 0:-1], test_scaled[i, -1]
            yhat = forecast_lstm(lstm_model, 1, X)
            # Invert scaling.
            yhat = invert_scale(scaler, X, yhat)
            # Invert differencing.
            yhat = inverse_difference(raw_values, yhat, len(test_scaled)+1-i)
            # Store the forecast.
            predictions.append(yhat)
        # Report performance on the held-out 12 observations.
        rmse = sqrt(mean_squared_error(raw_values[-12:], predictions))
        print('%d) Test RMSE: %.3f' % (r+1, rmse))
        error_scores.append(rmse)

    # Summarize results across repeats.
    results = DataFrame()
    results['rmse'] = error_scores
    print(results.describe())
    results.boxplot()
    pyplot.show()