"""
时序测试
implementation: 2, drop_p: [0.01, 0.1]
loss: 0.004216130822896957, act: ['sigmoid', 'tanh'], opt; Adadelta， unit; 50, loss: mse
"""

import os
import datetime
import numpy as np
import pandas as pd
from sklearn.preprocessing import MinMaxScaler
from keras.models import Sequential
from keras.layers import GRU, Dense
from keras.regularizers import L2
from utils.model_fit import train_model
from utils.plot_graph import keras_plot_model
from utils.search_model_paramter import grid_search


def load_data():
    """
    Load the monthly freight series from the Excel workbook and return it
    sorted chronologically.

    :return: pandas Series of monthly rail freight volume, indexed by month
    """
    source = './data/运输量.xlsx'
    frame = pd.read_excel(source, sheet_name=1)
    # The month column is stored as text like '2015年1月'; parse to datetime.
    frame['month'] = frame['month'].apply(
        lambda text: datetime.datetime.strptime(text, '%Y年%m月'))
    frame = frame.sort_values(by='month').set_index('month').sort_index()

    return frame['铁路货运量当期值(万吨)']


def build_fit_model(train_x, train_y, test_x, test_y,
                    unit, act, opt, loss, epochs, batch_size, verbose,
                    pic_path, model_name, workers=4,
                    use_multiprocessing=True, shuffle=True, plot=False,
                    **options):
    """
    Build a single-layer GRU regression model (GRU -> Dense(1)) and train it
    via utils.model_fit.train_model.

    :param train_x: training inputs, shaped (samples, steps, features)
    :param train_y: training targets
    :param test_x: validation/test inputs, same layout as train_x
    :param test_y: validation/test targets
    :param unit: number of GRU units
    :param act: two-element sequence [activation, recurrent_activation]
    :param opt: optimizer name or instance passed to model.compile
    :param loss: loss name passed to model.compile (e.g. 'mse')
    :param epochs: number of training epochs
    :param batch_size: mini-batch size
    :param verbose: Keras fit verbosity level
    :param pic_path: directory for training plots
    :param model_name: label used when saving/plotting the model
    :param workers: worker count forwarded to fit()
    :param use_multiprocessing: forwarded to fit()
    :param shuffle: forwarded to fit()
    :param plot: whether train_model should draw training curves
    :param options: optional extras:
        - drop_p: [dropout, recurrent_dropout] pair; defaults to [0.0, 0.0]
        - implementation: GRU implementation mode (1 or 2); defaults to 2
    :return: whatever train_model returns (model and loss/metrics)
    """
    # Fall back to no dropout / default implementation instead of passing
    # None (or a bare scalar) into GRU, which would raise at build time.
    drop_p = options.get('drop_p') or [0.0, 0.0]
    if not isinstance(drop_p, (list, tuple)):
        drop_p = [drop_p, drop_p]
    implementation = options.get('implementation') or 2

    model = Sequential()
    model.add(GRU(units=unit,
                  input_shape=(train_x.shape[1], train_x.shape[2]),
                  activation=act[0],
                  recurrent_activation=act[1],
                  dropout=drop_p[0],
                  recurrent_dropout=drop_p[1],
                  implementation=implementation))
    model.add(Dense(units=1))

    # NOTE(review): 'accuracy' is meaningless for an MSE regression target;
    # kept for backward compatibility with existing history consumers —
    # consider switching to ['mae'].
    model.compile(optimizer=opt,
                  loss=loss,
                  metrics=['accuracy'])

    return train_model(model=model,
                       train_x=train_x, train_y=train_y,
                       test_x=test_x, test_y=test_y,
                       epochs=epochs,
                       model_name=model_name,
                       pic_path=pic_path,
                       shuffle=shuffle,
                       verbose=verbose,
                       plot=plot,
                       batch_size=batch_size,
                       workers=workers,
                       use_multiprocessing=use_multiprocessing)


def _prepare_datasets(valid_ratio, test_ratio):
    """
    Load the series, frame it as a supervised problem (this month's value ->
    next month's value), scale to [0, 1], split chronologically, and reshape
    each split to the (samples, steps, features) layout GRU expects.

    :param valid_ratio: fraction of samples used for validation
    :param test_ratio: fraction of samples held out for test
    :return: [train_x, train_y, valid_x, valid_y, test_x, test_y]
    """
    series = load_data()
    df = pd.DataFrame(series)
    df.columns = ['data']
    # Supervised framing: the target is the next month's value.
    df['forecast'] = df['data'].shift(-1)
    # Drop the last row, whose shifted target is NaN.
    train_data = df.values[: -1]

    # NOTE(review): the scaler is fitted on the full dataset before the
    # train/valid/test split, leaking future statistics into training —
    # consider fitting on the training slice only.
    scaler = MinMaxScaler(feature_range=(0, 1))
    train_data = scaler.fit_transform(X=train_data)

    train_size = int(len(train_data) * (1 - valid_ratio - test_ratio))
    valid_size = int(len(train_data) * valid_ratio)

    splits = (train_data[: train_size],
              train_data[train_size: train_size + valid_size],
              train_data[train_size + valid_size:])

    step = 1  # one month per time step
    arrays = []
    for part in splits:
        x, y = part[:, :-1], part[:, -1:]
        arrays.append(x.reshape((x.shape[0], step, x.shape[1])))
        arrays.append(y.reshape((y.shape[0], step, y.shape[1])))
    return arrays


def sequence_gru_run(workers, valid_ratio=0.1, test_ratio=0.1):
    """
    Run the GRU experiment: either grid-search hyper-parameters over the
    validation set, or train a single model with the best known
    configuration (see the module docstring).

    :param workers: worker count forwarded to Keras fit()
    :param valid_ratio: fraction of samples used for validation
    :param test_ratio: fraction of samples held out for test
    :return: None
    """
    train_x, train_y, valid_x, valid_y, test_x, test_y = \
        _prepare_datasets(valid_ratio, test_ratio)

    # Toggle: True -> hyper-parameter grid search, False -> single training run.
    get_parameter = True

    unit_arr = [40, 50, 60]
    drop_p_arr = [[0.01, 0.1], [0.01, 0.01], [0.1, 0.01], [0.1, 0.1]]
    act_arr = [['relu', 'relu'], ['relu', 'softsign'], ['relu', 'sigmoid'],
               ['relu', 'tanh'],
               ['sigmoid', 'relu'], ['sigmoid', 'softsign'],
               ['sigmoid', 'sigmoid'], ['sigmoid', 'tanh'],
               ['tanh', 'relu'], ['tanh', 'softsign'], ['tanh', 'sigmoid'],
               ['tanh', 'tanh']]
    opt_arr = ['RMSprop', 'Adam', 'Adadelta', 'Nadam']
    loss_arr = ['mse']
    epochs = 500
    batch_size = 300

    # Hyper-parameter selection
    if get_parameter:
        for implementation in [1, 2]:
            for drop_p in drop_p_arr:
                print('implementation: {}, drop_p: {}'.format(implementation, drop_p))
                grid_search(func=build_fit_model,
                            train_x=train_x, train_y=train_y,
                            test_x=valid_x, test_y=valid_y,
                            unit_arr=unit_arr,
                            act_arr=act_arr,
                            opt_arr=opt_arr,
                            loss_arr=loss_arr,
                            epochs=epochs,
                            batch_size=batch_size,
                            workers=workers,
                            use_multiprocessing=True,
                            shuffle=True,
                            plot=False,
                            implementation=implementation,
                            drop_p=drop_p)

    # Single model training with the best configuration found by the grid
    # search (the previous version was broken copy-paste from a multiclass
    # RNN script: list-valued units, categorical_crossentropy on a
    # regression target, and a scalar drop_p that build_fit_model indexes).
    else:
        model, loss_metrics = \
            build_fit_model(train_x=train_x, train_y=train_y,
                            test_x=valid_x, test_y=valid_y,
                            unit=50,
                            act=['sigmoid', 'tanh'],
                            opt='Adadelta',
                            loss='mse',
                            pic_path='../pic',
                            model_name='时序GRU',
                            epochs=epochs,
                            batch_size=batch_size,
                            verbose=1,
                            workers=workers,
                            use_multiprocessing=True,
                            shuffle=True,
                            plot=True,
                            implementation=2,
                            drop_p=[0.01, 0.1])
        print('loss_metrics: {}'.format(loss_metrics))
        # Save a diagram of the trained network.
        keras_plot_model(model=model, pic='../pic/时序GRU.png')
