import datetime

import numpy as np
from matplotlib import pyplot as plt
import pickle
from itertools import chain
import entropy
import pandas as pd
from sklearn.preprocessing import MinMaxScaler
from sklearn.metrics import mean_absolute_error,r2_score,mean_squared_error
from matplotlib.offsetbox import AnchoredText
import configparser

# Global Matplotlib configuration: a CJK-capable font so the Chinese axis
# labels render, and keep the minus sign renderable with that font.
plt.rcParams.update({
    'font.family': ['Microsoft YaHei'],
    'font.size': 11,
    'axes.unicode_minus': False,
})

def MAPE(true, pred):
    """Return the mean absolute percentage error, as a percentage.

    Note: undefined (inf/nan) when `true` contains zeros.
    """
    actual = np.asarray(true)
    forecast = np.asarray(pred)
    return np.mean(np.abs(actual - forecast) / actual) * 100

def load_model():
    """Load the pickled SVR/LSTM models and scalers produced at training time.

    The pickle directory is read from 'train_pso_svr.config', key
    [Model] pickle_save_path (expected to end with a path separator,
    since filenames are appended by concatenation).

    Returns:
        tuple: (lstm_speed, lstm_flow, svr_speed, svr_flow,
                scaler_speed, scaler_flow)

    SECURITY NOTE: pickle.load executes arbitrary code on load —
    only load model files from a trusted source.
    """
    config = configparser.ConfigParser()
    config.read('train_pso_svr.config')
    pickle_save_path = config["Model"]["pickle_save_path"]

    def _load(filename):
        # Unpickle one artifact from the configured directory.
        with open(pickle_save_path + filename, 'rb') as f:
            return pickle.load(f)

    svr_speed = _load('svr_speed_model.pickle')
    svr_flow = _load('svr_flow_model.pickle')
    lstm_speed = _load('lstm_speed.pickle')
    lstm_flow = _load('lstm_flow.pickle')
    scaler_speed = _load('scaler_speed_model.pickle')
    # Renamed from the misleading local name `flow_speed`: this is the
    # MinMax scaler for flow, not a speed value.
    scaler_flow = _load('scaler_flow_model.pickle')

    return lstm_speed, lstm_flow, svr_speed, svr_flow, scaler_speed, scaler_flow

def read_excel(fname):
    """Load data/<fname>.xlsx and return its 'speed' and 'flow' columns.

    The first spreadsheet column is used as the index. Each returned value
    is a numpy array reshaped to a single column, i.e. shape (n, 1).

    Returns:
        dict with keys 'speed' and 'flow'.
    """
    frame = pd.read_excel('data/' + fname + '.xlsx', index_col=0)
    return {
        'speed': np.array(frame['speed']).reshape(-1, 1),
        'flow': np.array(frame['flow']).reshape(-1, 1),
    }


def get_LSTM_input(up_fname, down_fname, test_mode=False):
    """Build LSTM samples from upstream/downstream station time series.

    Each sample is a sliding window of `time_steps` upstream (speed, flow)
    pairs; the target is the downstream (speed, flow) at the window's last
    time step.

    Args:
        up_fname: excel base name (under data/) for the upstream station.
        down_fname: excel base name for the downstream station.
        test_mode: when True, return timestamp-keyed dicts instead of arrays.

    Returns:
        test_mode False: (x, y, scalers) with x of shape
            (samples, time_steps, 2) and y of shape (samples, 2).
        test_mode True: (time_series_input, time_series_output, scalers).

    NOTE(review): this function reads 'occu', 'time' and 'scaler_*' keys
    that the read_excel() in this file does not populate — confirm which
    data-loading helper is actually intended here.
    """
    vec_up = read_excel(up_fname)
    speed_up = vec_up['speed'].transpose().tolist()[0]
    flow_up = vec_up['flow'].transpose().tolist()[0]
    occu_up = vec_up['occu'].transpose().tolist()[0]
    date = vec_up['time']

    vec_down = read_excel(down_fname)
    speed_down = vec_down['speed'].transpose().tolist()[0]
    flow_down = vec_down['flow'].transpose().tolist()[0]
    occu_down = vec_down['occu'].transpose().tolist()[0]

    # Upstream features interleaved as (speed, flow) rows, shape (n, 2).
    features = np.asarray(list(chain.from_iterable(zip(speed_up, flow_up)))).reshape(-1, 2)

    time_steps = 100  # sliding input window length

    scalers = {
        'down_speed': vec_down['scaler_speed'],
        'down_flow': vec_down['scaler_flow'],
        'down_occu': vec_down['scaler_occu'],
        'up_speed': vec_up['scaler_speed'],
        'up_flow': vec_up['scaler_flow'],
        'up_occu': vec_up['scaler_occu'],
    }

    x = []
    y_speed = []
    y_flow = []
    time_series_input = {}
    time_series_output = {}

    # Bounded range replaces the original loop-with-break; empty when the
    # series is shorter than one window.
    for i in range(len(speed_up) - time_steps + 1):
        feat_vec = features[i:i + time_steps].tolist()  # upstream window
        x.append(feat_vec)

        # Downstream target aligned with the window's last time step.
        y_speed.append(speed_down[i + time_steps - 1])
        y_flow.append(flow_down[i + time_steps - 1])
        if test_mode:
            time_series_input[date[i + time_steps - 1]] = feat_vec
            time_series_output[date[i + time_steps - 1]] = [y_speed[-1], y_flow[-1]]

    x = np.asarray(x)
    y = np.asarray(list(chain.from_iterable(zip(y_speed, y_flow)))).reshape(-1, 2)

    print(x.shape)

    if test_mode:
        return time_series_input, time_series_output, scalers
    # Removed the unreachable `return EOFError` that followed the if/else.
    return x, y, scalers


def draw_pred_curve(x_time, y_pred_speed, y_pred_flow, y_true_speed, y_true_flow):
    """Plot predicted vs. true downstream speed (top) and flow (bottom).

    Saves the figure to train_result/pic/Final.png and then displays it.

    Bug fix: savefig() now runs BEFORE show() — after show() returns in
    non-interactive mode the figure may already be cleared, which produced
    a blank PNG.

    Returns:
        The inputs, unchanged: (x_time, y_pred_speed, y_pred_flow,
        y_true_speed, y_true_flow).
    """
    plt.figure(figsize=(15, 9.375))

    ax1 = plt.subplot(211)
    ax1.plot(x_time, y_pred_speed, linestyle='-', color='#4285F4', label='predict')
    ax1.plot(x_time, y_true_speed, linestyle='-', color='#DB4437', label='true')
    ax1.set(ylabel='speed')
    ax1.set_title('Off-site traffic speed prediction')
    ax1.legend(loc='best', framealpha=0.5)
    _attach_metrics_box(ax1, y_true_speed, y_pred_speed, 0.2)

    ax2 = plt.subplot(212)
    ax2.plot(x_time, y_pred_flow, linestyle='-', color='#F4B400', label='predict')
    ax2.plot(x_time, y_true_flow, linestyle='-', color='#0F9D58', label='true')
    ax2.set(ylabel='flow')
    ax2.legend(loc='best', framealpha=0.5)
    ax2.set_title('Off-site traffic flow prediction')
    _attach_metrics_box(ax2, y_true_flow, y_pred_flow, 0.1)

    plt.savefig('train_result/pic/Final.png', dpi=300)
    plt.show()

    return x_time, y_pred_speed, y_pred_flow, y_true_speed, y_true_flow


def _attach_metrics_box(ax, y_true, y_pred, rounding_size):
    """Add a rounded annotation box with mae/r2/mse/mape metrics to `ax`."""
    text = 'mae: {:.4f}\nr2 score: {:.4f}\nmse: {:.4f}\nmape: {:.4f}'.format(
        mean_absolute_error(y_true, y_pred),
        r2_score(y_true, y_pred),
        mean_squared_error(y_true, y_pred),
        MAPE(y_true, y_pred),
    )
    at = AnchoredText(text,
                      prop=dict(size=10), frameon=True,
                      loc='lower left',
                      )
    at.patch.set_boxstyle("round,pad=0.,rounding_size={}".format(rounding_size))
    ax.add_artist(at)

def draw_rank(x_time, pred_rank, true_rank, accu):
    """Plot predicted vs. true congestion rank for 2019-04-09.

    Top panel: full day. Bottom-left: morning rush (05:45-08:15).
    Bottom-right: evening rush (16:45-19:15). The prediction accuracy is
    shown in an annotation box. Saves to train_result/pic/rank.png before
    displaying.

    Args:
        x_time: numpy array of datetime64 timestamps (must contain the
            hard-coded boundary timestamps below).
        pred_rank / true_rank: rank series aligned with x_time.
        accu: overall classification accuracy to annotate.
    """
    plt.figure(figsize=(15, 9.375))
    ax = plt.subplot2grid((2, 2), (0, 0), colspan=2)

    # First index belonging to 2019-04-09 (midnight onwards).
    cut = np.where(x_time == np.datetime64('2019-04-09T00:00:00'))[0].tolist()[0]

    ax.plot(x_time[cut:], pred_rank[cut:], color='#4285F4', label='Predict Rank', marker='.')
    ax.plot(x_time[cut:], true_rank[cut:], color='#DB4437', label='True Rank', marker='o', fillstyle='none')

    ax.set(xlabel='时间')
    ax.set(ylabel='等级')
    ax.set_ylim([0, 7])
    ax.set_title('全天交通拥堵等级')
    ax.legend(loc='best', framealpha=0.5)

    speed_annotation = 'accuracy: {:.4f}'.format(accu)

    at = AnchoredText(speed_annotation,
                      prop=dict(size=10), frameon=True,
                      loc='upper left',
    )
    at.patch.set_boxstyle("round,pad=0.,rounding_size=0.2")
    ax.add_artist(at)

    # Morning rush hour window.
    ax2 = plt.subplot2grid((2, 2), (1, 0), colspan=1)
    start = np.where(x_time == np.datetime64('2019-04-09T05:45:00'))[0].tolist()[0]
    end = np.where(x_time == np.datetime64('2019-04-09T08:15:00'))[0].tolist()[0] + 1

    ax2.plot(x_time[start:end], pred_rank[start:end], color='#4285F4', label='Predict Rank', marker='.')
    ax2.plot(x_time[start:end], true_rank[start:end], color='#DB4437', label='True Rank', marker='o', fillstyle='none')

    ax2.set(xlabel='时间')
    ax2.set(ylabel='等级')
    ax2.set_ylim([0, 7])
    ax2.set_title('早高峰')
    ax2.legend(loc='best', framealpha=0.5)

    # Evening rush hour window. (Removed the unused `my_stick` tick array
    # that was computed here and never applied to any axis.)
    ax3 = plt.subplot2grid((2, 2), (1, 1), colspan=1)
    start = np.where(x_time == np.datetime64('2019-04-09T16:45:00'))[0].tolist()[0]
    end = np.where(x_time == np.datetime64('2019-04-09T19:15:00'))[0].tolist()[0] + 1

    ax3.plot(x_time[start:end], pred_rank[start:end], color='#4285F4', label='Predict Rank', marker='.')
    ax3.plot(x_time[start:end], true_rank[start:end], color='#DB4437', label='True Rank', marker='o', fillstyle='none')

    ax3.set(xlabel='时间')
    ax3.set(ylabel='等级')
    ax3.set_ylim([0, 7])
    ax3.set_title('晚高峰')
    ax3.legend(loc='best', framealpha=0.5)

    plt.tight_layout()
    plt.savefig('train_result/pic/rank.png', dpi=300)
    plt.show()


class EtcModel():
    """Off-site traffic predictor: an upstream SVR forecasts the next upstream
    reading, which is appended to the downstream LSTM's input window to
    forecast downstream speed/flow five minutes ahead."""

    def __init__(self):
        self.up_fname = 'test_up'
        self.down_fname = 'test_down'
        # NOTE(review): load_model() returns the flow *scaler* last; it is
        # stored here under the historical attribute name `flow_speed`.
        self.lstm_speed, self.lstm_flow, self.svr_speed, self.svr_flow, self.scaler_speed, self.flow_speed = load_model()
        self.scaler_up_speed, self.scaler_up_flow, self.scaler_down_speed, self.scaler_down_flow = self.get_scaler()

    def get_scaler(self):
        """Fit MinMax scalers on the up/down station excel files.

        Returns:
            (scaler_up_speed, scaler_up_flow, scaler_down_speed,
             scaler_down_flow)
        """
        df_up = pd.read_excel('data/test_up.xlsx', index_col=0)
        df_down = pd.read_excel('data/test_down.xlsx', index_col=0)

        # MinMaxScaler.fit returns the scaler itself.
        scaler_up_speed = MinMaxScaler().fit(np.array(df_up['speed']).reshape(-1, 1))
        scaler_up_flow = MinMaxScaler().fit(np.array(df_up['flow']).reshape(-1, 1))
        scaler_down_speed = MinMaxScaler().fit(np.array(df_down['speed']).reshape(-1, 1))
        scaler_down_flow = MinMaxScaler().fit(np.array(df_down['flow']).reshape(-1, 1))
        return scaler_up_speed, scaler_up_flow, scaler_down_speed, scaler_down_flow

    def fit_scaler(self, data_type, data):
        """Scale [speed, flow] rows with the station-appropriate scalers.

        Args:
            data_type: 'svr_data' selects the upstream scalers; any other
                value selects the downstream scalers.
            data: sequence of [speed, flow] rows.

        Returns:
            list of scaled numpy rows.
        """
        if data_type == 'svr_data':
            scaler_speed = self.scaler_up_speed
            scaler_flow = self.scaler_up_flow
        else:
            scaler_speed = self.scaler_down_speed
            scaler_flow = self.scaler_down_flow
        # Bug fix: force float dtype. With integer input, np.array(data)
        # produced an int array and the column assignments below silently
        # truncated the scaled (fractional) values.
        data_array = np.array(data, dtype=float)
        data_array[:, 0] = scaler_speed.transform(data_array[:, 0].reshape(-1, 1)).flatten()
        data_array[:, 1] = scaler_flow.transform(data_array[:, 1].reshape(-1, 1)).flatten()
        return list(data_array)

    def predict_using_Model(self, up_svr_input, down_lstm_input):
        """Predict downstream (speed, flow) five minutes ahead.

        Args:
            up_svr_input: recent scaled upstream [speed, flow] rows
                (3 rows expected, given the reshape to (1, 6) below —
                TODO confirm against SVR training code).
            down_lstm_input: scaled downstream window of 100 [speed, flow]
                pairs (list; consumed non-destructively via slicing).

        Returns:
            (next_down_speed, next_down_flow) in original (unscaled) units.
        """
        temp_vec = np.asarray(up_svr_input)
        speed = temp_vec[:, 0].reshape(-1, 1)
        flow = temp_vec[:, 1].reshape(-1, 1)
        # The SVR operates on raw values, so undo the MinMax scaling first.
        speed = self.scaler_up_speed.inverse_transform(speed).transpose().ravel()
        flow = self.scaler_up_flow.inverse_transform(flow).transpose().ravel()
        svr_input = np.append(speed, flow).reshape(1, 6)

        # Upstream speed and flow five minutes ahead.
        next_up_speed = self.svr_speed.predict(svr_input)
        next_up_flow = self.svr_flow.predict(svr_input)
        next_up_speed_uniformed = self.scaler_up_speed.transform([next_up_speed]).item()
        next_up_flow_uniformed = self.scaler_up_flow.transform([next_up_flow]).item()

        # Shift the LSTM window forward: drop the oldest step and append
        # the predicted upstream values.
        lstm_input = down_lstm_input[1:]
        lstm_input.append([next_up_speed_uniformed, next_up_flow_uniformed])

        next_down_speed_uniformed = self.lstm_speed.predict(np.asarray(lstm_input).reshape(1, 100, 2))
        next_down_flow_uniformed = self.lstm_flow.predict(np.asarray(lstm_input).reshape(1, 100, 2))

        next_down_speed = \
            self.scaler_down_speed.inverse_transform(next_down_speed_uniformed).tolist()[0][0]
        next_down_flow = \
            self.scaler_down_flow.inverse_transform(next_down_flow_uniformed).tolist()[0][0]

        return next_down_speed, next_down_flow

    def predict(self, timestamp, svr_data, lstm_data):
        """Predict downstream speed, flow and congestion rank.

        Args:
            timestamp: POSIX timestamp of the prediction time.
            svr_data: scaled upstream input (see predict_using_Model).
            lstm_data: scaled downstream 100-step window.

        Returns:
            (pred_speed, pred_flow, pred_rank) where pred_rank is the
            entropy-weighted congestion rank.
        """
        time = datetime.datetime.fromtimestamp(timestamp)
        pred_speed, pred_flow = self.predict_using_Model(up_svr_input=svr_data, down_lstm_input=lstm_data)
        print(pred_speed, pred_flow)
        # Entropy weights differ for morning rush, evening rush and other
        # periods; the rank is computed from the weighted matrix.
        mor_weight, eve_weight, oth_weight = entropy.get_final_weight()
        pred_matrix = entropy.matrix_with_rush_tag([time], pred_speed, pred_flow)
        pred_rank = entropy.congestion_rank(pred_matrix, mor_weight, eve_weight, oth_weight)
        return pred_speed, pred_flow, pred_rank


if __name__ == '__main__':
    # Instantiate the predictor: loads the pickled models and fits the
    # MinMax scalers. (Removed an unused ConfigParser instance —
    # load_model() reads the config itself.)
    Model = EtcModel()