# Minimal attention demo (currently simplified to a plain dense network).



# TODO: the input shape fed into the first Dense layer is still an open question.




import numpy as np

from ML_Methods.attention.utils import parse, data_perpare

np.random.seed(1337)  # for reproducibility
from keras.models import *
from keras.layers import Input, Dense, merge

import os

from math import sqrt
import time
import datetime as time_datetime

from pandas import read_csv
from pandas import DataFrame
from pandas import concat
from pandas import Series
from pandas import datetime

import numpy as np
from numpy import array
from numpy import concatenate

from keras import optimizers
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import LSTM
from keras.callbacks import EarlyStopping
from keras.layers import Bidirectional
from keras.utils import multi_gpu_model

from sklearn.preprocessing import MinMaxScaler
from sklearn.preprocessing import LabelEncoder
from sklearn.metrics import mean_squared_error, mean_absolute_error, r2_score

import matplotlib.pyplot as plt



# keract 4.3.1
# Compatible with latest tensorflow 2.3 (from keract 4.3.0).
# https://github.com/philipperemy/keract
import keract

'''
to replace : keract.get_activations(model, x, layer_names=None) return dict {}
'''


def get_activations(model, inputs, print_shape_only=False, layer_name=None):
    """Return the layer activations of ``model`` for a single input batch.

    Adapted from: https://github.com/philipperemy/keras-visualize-activations

    Fix: the original used the Keras backend alias ``K`` without importing it
    anywhere in this file, which raised ``NameError`` on first call. The
    backend is now imported locally.

    :param model: a built (and typically compiled) Keras model.
    :param inputs: one batch of inputs matching ``model.input``.
    :param print_shape_only: if True, print only each activation's shape;
        otherwise print the full activation arrays as well.
    :param layer_name: if given, collect activations only from the layer with
        this exact name; if None, collect from every layer.
    :return: list of numpy arrays, one per selected layer, in layer order.
    """
    from keras import backend as K  # local import: K was undefined at module level

    print('----- activations -----')
    inp = model.input
    if layer_name is None:
        outputs = [layer.output for layer in model.layers]
    else:
        # Only the layer(s) whose name matches exactly.
        outputs = [layer.output for layer in model.layers if layer.name == layer_name]
    # One backend evaluation function per selected layer output.
    funcs = [K.function([inp] + [K.learning_phase()], [out]) for out in outputs]
    # learning_phase=1. runs the graph in training mode (e.g. dropout active).
    layer_outputs = [func([inputs, 1.])[0] for func in funcs]
    activations = []
    for layer_activations in layer_outputs:
        activations.append(layer_activations)
        if print_shape_only:
            print(layer_activations.shape)
        else:
            print('shape为', layer_activations.shape)
            print(layer_activations)
    return activations


def build_model(input_dim: tuple):
    """Build a minimal two-layer fully-connected model.

    NOTE(review): despite the file's "attention" title, this is a plain
    Dense network — the earlier attention branch was removed. Callers that
    later look up a layer named 'attention_vec' will not find one here.

    :param input_dim: input shape tuple, e.g. ``(timesteps, features)``.
        With a 2-D input shape, each ``Dense`` layer is applied along the
        last axis, so the model output is ``(batch, timesteps, 1)``.
    :return: an uncompiled Keras ``Sequential`` model.
    """
    model = Sequential()
    # The hidden width (120) is the first hyper-parameter to tune; extra
    # Dense layers can be stacked here during a hyper-parameter search.
    print(input_dim)
    model.add(Dense(120, input_shape=input_dim, activation='relu'))
    model.add(Dense(1))

    return model


if __name__ == '__main__':

    experiment_time_start = time.time()  # start wall-clock timing for the whole experiment
    print('训练开始时间: ' + time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(experiment_time_start)))

    WORK_PATH = os.getcwd()  # current working directory of the project (captured but unused below)

    # Machine-specific project root; switch the line per developer.
    # WORK_ROOT_PATH = 'E:\study\MLDrought'  # for yf path
    WORK_ROOT_PATH = 'D:\Project\ml_study'  # for wkz path

    DATA_PATH = os.path.join(WORK_ROOT_PATH, r'indices_caculate\result\multi_spei_csv\SPEI-12\Multi_SPEI-12_56080.txt')

    # Load the SPEI time series; the 'year' and 'month' columns are merged
    # into a single datetime index via the project-supplied `parse` helper.
    dataset = read_csv(DATA_PATH, header=0, parse_dates=[['year', 'month']], index_col=0, date_parser=parse)
    dataset.index.name = 'time'
    # Drop meteorological columns that are not used as model features.
    dataset.drop(columns=['average_air_pressure',
                          'average_water_air_pressure',
                          'low_temp', 'high_temp',
                          'precipitation',
                          'temperature',
                          'humidity'], inplace=True)

    timesteps = 10*12 # 10 years of monthly data per sliding window
    X_train, y_train, X_test, y_test =data_perpare(dataset, timesteps)

    features = X_train.shape[2]
    # also change X_test from [samples, timesteps] to [samples, timesteps, features]
    X_test = X_test.reshape(X_test.shape[0],X_test.shape[1],features)

    # X_train is [samples, timesteps, features]
    input_dim = (X_train.shape[1], X_train.shape[2],) # input dimension tuple = (timesteps, features,)

    # ANN model preparation.
    # NOTE(review): loss is binary_crossentropy but build_model's final
    # Dense(1) has no sigmoid activation — confirm the target encoding.
    ANN_model = build_model(input_dim)
    ANN_model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
    print(ANN_model.summary())

    ANN_model.fit(X_train, y_train, epochs=50, batch_size=64, validation_split=0.5)




    # using 1 test sample
    # testing_inputs_1, testing_outputs = get_data(1, input_dim)

    testing_inputs_1 = X_test[0] # first sample of X_test

    # Attention vector corresponds to the second matrix.
    # The first one is the Inputs output.
    # NOTE(review): the current build_model creates no layer named
    # 'attention_vec', so this lookup returns an empty list and [0] will
    # raise IndexError — this path only works with the attention variant.
    attention_vector = get_activations(ANN_model, testing_inputs_1,
                                       print_shape_only=True,
                                       layer_name='attention_vec')[0].flatten()
    print('attention =', attention_vector)

    # Plotting: bar chart of attention weight per input dimension.
    import matplotlib.pyplot as plt
    import pandas as pd

    pd.DataFrame(attention_vector, columns=['attention (%)']).plot(kind='bar',
                                                                   title='Attention Mechanism as '
                                                                         'a function of input'
                                                                         ' dimensions.')
    plt.show()