import datetime
import math
import random
from datetime import timedelta
import joblib
import numpy
import numpy as np
import pymysql
from matplotlib import pyplot as plt, pyplot
from numpy import float16, float32
from sklearn import metrics
from sklearn.metrics import mean_absolute_error
from sklearn.neural_network import MLPRegressor
from sklearn.preprocessing import OneHotEncoder, MinMaxScaler
from sklearn import ensemble
from sklearn.svm import SVR
import pickle

# Module-level cache of trained random-forest models, one sub-dict per scope.
rf_model_dict = {
    "station": {},
    "line": {},
}


def get_data():
    """Fetch every row of the `stationflow` table from the local MySQL DB.

    Returns:
        list: all result rows as tuples, in server order.
    """
    conn = pymysql.connect(host='localhost',  # connection host
                           user='root',  # user name
                           passwd='q19723011',  # password
                           port=3306,  # port, MySQL default
                           db='month6',  # database name
                           charset='utf8',  # character encoding
                           )
    try:
        # Ensure the cursor and connection are released even if the query
        # raises — the original leaked both on failure.
        with conn.cursor() as cur:
            cur.execute("select * from stationflow  ")
            datas = list(cur.fetchall())
        conn.commit()  # no-op for a pure SELECT; kept for symmetry
    finally:
        conn.close()
    return datas


def train(datas):
    """Train a BP neural network and predict the held-out first row.

    The FIRST row of `datas` is the prediction target; all remaining rows
    are used for training.

    Args:
        datas: sequence of rows; columns 0-5 are features, column 6 the label.

    Returns:
        [true_label, prediction] for the first row. The true label keeps its
        original (1,) array shape for backward compatibility with callers
        that `%d`-format it.
    """
    datas = np.array(datas)
    data_case = datas[:, 0:6]   # feature columns
    data_label = datas[:, 6:7]  # label column
    # NOTE(review): both scalers are fitted on ALL rows, including the
    # held-out one — mild leakage, preserved to keep the pipeline unchanged.
    mm = MinMaxScaler()
    data_label_process = mm.fit_transform(data_label)  # normalize labels
    mm_case = MinMaxScaler()
    data_case_hot = mm_case.fit_transform(data_case)  # normalize features
    # First row is the prediction target, the rest train the model.
    test_data_case = data_case_hot[0:1]
    train_data_case = data_case_hot[1:]
    train_data_label = data_label_process[1:]
    model = MLPRegressor(hidden_layer_sizes=(7, 8, 8), activation='tanh', solver='adam', max_iter=2000,
                         learning_rate='adaptive', learning_rate_init=0.02)  # BP neural-network regressor
    model.fit(train_data_case, train_data_label.ravel())
    pre_test = model.predict(test_data_case)  # predict the held-out row
    # BUG FIX: the old code appended the test prediction AFTER the training
    # predictions and then returned index 0 — i.e. the prediction for a
    # TRAINING row, not the held-out one. Inverse-transform the test
    # prediction alone instead (matching rf_train below).
    pre = mm.inverse_transform(pre_test.reshape(1, -1))[0]
    return [data_label[0], pre[0]]


def bpnn_data(time, stationID):
    """Build BPNN inputs for one station/time-of-day and run train() per day type.

    Queries `station_time_flow` for the 30-minute window ending at `time`
    (7 five-minute slots), splits the 7-slot windows into workday vs weekend
    groups, and runs train() on each group.

    Args:
        time: datetime.timedelta time-of-day of the slot to predict.
        stationID: numeric station id (interpolated with %d into the SQL).

    Returns:
        [workday_result, weekend_result], each a [true_label, prediction]
        pair from train().
    """
    conn = pymysql.connect(host='localhost',  # connection host
                           user='root',  # user name
                           passwd='q19723011',  # password
                           port=3306,  # port, MySQL default
                           db='month6',  # database name
                           charset='utf8',  # character encoding
                           )
    cur = conn.cursor()  # create cursor
    dataProcess = [[], []]  # index 0: workday windows, index 1: weekend windows
    # str(timedelta) like "7:00:00" -> "70000" (colon-stripped time literal).
    start_time = "".join(str(time - timedelta(minutes=30)).split(':')[:3])
    # NOTE(review): string-formatted SQL — acceptable for these internal
    # numeric values, but parameterized queries would be safer.
    sql = "select * from station_time_flow where time >= %s and  time <= %s and stationID = %d" \
          % (start_time, "".join(str(time).split(':')[:3]), stationID)
    cur.execute(sql)
    datas = list(cur.fetchall())
    # Assumes the result rows arrive grouped as 7 consecutive slots per day
    # — TODO confirm the table/query guarantees this ordering.
    for i in range(0, len(datas), 7):
        week = datas[i][1].weekday() + 1  # 1=Mon .. 7=Sun
        if 1 <= week <= 5:
            dayType = 0  # workday
        else:
            dayType = 1  # weekend
        # Column 3 of each row is presumably the flow count — verify schema.
        data_list = [datas[i][3], datas[i + 1][3],
                     datas[i + 2][3], datas[i + 3][3], datas[i + 4][3],
                     datas[i + 5][3], datas[i + 6][3]]
        dataProcess[dayType].append(data_list)

    conn.commit()
    cur.close()  # close cursor
    conn.close()  # close connection
    return [train(dataProcess[0]), train(dataProcess[1])]


def bpnn(stationID):
    """Run BPNN predictions for one station across the 07:00–23:00 day.

    For every 5-minute slot, predicts workday and weekend flow via
    bpnn_data(), inserts per-slot results into `stationprebpnn`, then inserts
    MSE/RMSE/MAE per date into `station_evaluating_indicator_bpnn`.

    Args:
        stationID: numeric station id; %d-formatted into the INSERT SQL.
    """
    m = timedelta(hours=7)  # first predicted slot of the day
    conn = pymysql.connect(host='localhost',  # connection host
                           user='root',  # user name
                           passwd='q19723011',  # password
                           port=3306,  # port, MySQL default
                           db='month6',  # database name
                           charset='utf8',  # character encoding
                           )
    cur = conn.cursor()  # create cursor
    # res_list[i] = [observed flows, predicted flows] for datelist[i].
    res_list = [[[], []], [[], []]]
    # Paired with bpnn_data()'s [workday, weekend] output — 2018-06-01 is a
    # Friday and 2018-06-02 a Saturday, so index i matches day type i.
    datelist = ["20180601", "20180602"]
    while m <= timedelta(hours=23):
        res = bpnn_data(m, stationID)
        for i in range(len(datelist)):
            sql = """  insert into stationprebpnn(stationID,date,time,oriflow,preflow) values (%d,%s,%s,%d,%d)""" \
                  % (stationID, datelist[i], "".join(str(m).split(':')[:3]), res[i][0], res[i][1])
            try:
                cur.execute(sql)
            except Exception as e:
                # Best-effort insert: log the failure and keep predicting.
                print("except:", e)
                print(sql)
            res_list[i][0].append(res[i][0])
            res_list[i][1].append(res[i][1])
        m = m + timedelta(minutes=5)
    for i in range(len(res_list)):
        mse = metrics.mean_squared_error(res_list[i][0], res_list[i][1])
        rmse = np.sqrt(mse)
        mae = metrics.mean_absolute_error(res_list[i][0], res_list[i][1])
        # NOTE(review): mape is computed but never stored or printed; it also
        # divides by observed flow and would blow up on zero-flow slots.
        mape = np.mean(np.abs((np.array(res_list[i][0]) - np.array(res_list[i][1])) / np.array(res_list[i][0]))) * 100
        sql = """insert into station_evaluating_indicator_bpnn(stationID,date,mse,rmse,mae) values (%d,%s,%f,%f,%f
                    )""" % (stationID, datelist[i], mse, rmse, mae)
        try:
            cur.execute(sql)
        except Exception as e:
            print("except:", e)
            print(sql)

    conn.commit()
    cur.close()  # close cursor
    conn.close()  # close connection


def show(ori_data, pre_data):
    """Plot observed vs. predicted flow as two overlaid line charts."""
    plt.title('bpnn')  # chart title
    plt.rcParams['font.sans-serif'] = ['SimHei']  # enable CJK glyphs in labels
    plt.xlabel('时间')  # x-axis label
    plt.ylabel('客流量')  # y-axis label
    xs = range(len(ori_data))
    # Observed series first, predicted second — legend order relies on this.
    for series in (ori_data, pre_data):
        plt.plot(xs, series, marker='o', markersize=1)
    plt.legend(['实际', '预测'])  # series names
    plt.show()  # display the figure

def day_type(day):
    """Map a weekday number (1=Mon .. 7=Sun) to a day-type flag.

    Args:
        day: weekday number as produced by `weekday() + 1`.

    Returns:
        int: 0 for workdays (1-5), 1 otherwise (weekend).
    """
    # The original had an unreachable trailing `return 1` after the
    # if/else; a single conditional expression covers both branches.
    return 0 if 1 <= day <= 5 else 1
def bpnnDateBase():
    """Run the BPNN prediction pipeline (bpnn) for every station in the DB."""
    conn = pymysql.connect(host='localhost',  # connection host
                           user='root',  # user name
                           passwd='q19723011',  # password
                           port=3306,  # port, MySQL default
                           db='month6',  # database name
                           charset='utf8',  # character encoding
                           )
    try:
        with conn.cursor() as cur:
            cur.execute("""select *from station""")
            datas = list(cur.fetchall())
        conn.commit()
    finally:
        conn.close()
    stationIDs = np.array(datas)[:, 0:1]
    for stationID in stationIDs:
        # BUG FIX: `stationID` is a 1-element numpy row here, and bpnn()
        # %d-formats it into SQL, which fails on modern numpy. Pass a plain
        # Python int instead.
        station_id = int(stationID[0])
        print(station_id)
        bpnn(station_id)



def rf_station_data(time, stationID, date):
    """Build SVR inputs for one station/slot and predict flow on `date`.

    Queries the 30-minute window ending at `time`, keeps windows from days of
    the same day type (workday/weekend) as `date` for training, puts the
    window for `date` itself LAST, and delegates to rf_train().

    Args:
        time: datetime.timedelta time-of-day of the slot.
        stationID: station id, convertible to int.
        date: target date string, "YYYY-MM-DD".

    Returns:
        rf_train()'s [true_label, prediction, model] for the `date` window.
    """
    conn = pymysql.connect(host='localhost',  # connection host
                           user='root',  # user name
                           passwd='q19723011',  # password
                           port=3306,  # port, MySQL default
                           db='month6',  # database name
                           charset='utf8',  # character encoding
                           )
    cur = conn.cursor()  # create cursor
    dataProcess = []
    # str(timedelta) like "7:00:00" -> "70000" (colon-stripped time literal).
    start_time = "".join(str(time - datetime.timedelta(minutes=30)).split(':')[:3])
    # NOTE(review): string-formatted SQL — parameterized queries would be safer.
    sql = "select * from station_time_flow where time >= %s and  time <= %s and stationID = %d" \
          % (start_time, "".join(str(time).split(':')[:3]), int(stationID))
    cur.execute(sql)
    datas = list(cur.fetchall())
    ob_dayType = day_type(datetime.datetime.strptime(date, "%Y-%m-%d").date().weekday() + 1)
    ob_data_list = list()
    # Assumes rows come grouped as 7 consecutive 5-minute slots per day —
    # TODO confirm the query guarantees this ordering.
    for i in range(0, len(datas), 7):
        if datas[i][1] == datetime.datetime.strptime(date, "%Y-%m-%d").date():
            # The target date's own window — appended last, below.
            ob_data_list = [datas[i][3], datas[i + 1][3],
                            datas[i + 2][3], datas[i + 3][3], datas[i + 4][3],
                            datas[i + 5][3], datas[i + 6][3]]
        elif day_type(datas[i][1].weekday() + 1) == ob_dayType:
            # Same day type as the target date -> training window.
            data_list = [datas[i][3], datas[i + 1][3],
                         datas[i + 2][3], datas[i + 3][3], datas[i + 4][3],
                         datas[i + 5][3], datas[i + 6][3]]
            dataProcess.append(data_list)
    # rf_train() treats the LAST row as the held-out prediction target.
    dataProcess.append(ob_data_list)

    conn.commit()
    cur.close()  # close cursor
    conn.close()  # close connection
    return rf_train(dataProcess)



def rf_train(datas):
    """Train an RBF SVR on all rows but the last and predict the last row.

    Args:
        datas: sequence of rows; columns 0-5 are features, column 6 the
            label. The LAST row is the held-out prediction target.

    Returns:
        [true_label as float, prediction truncated to int then cast to
        float, fitted model].
    """
    datas = np.array(datas)
    data_case = datas[:, 0:6]   # feature columns
    data_label = datas[:, 6:7]  # label column
    # NOTE(review): both scalers are fitted on ALL rows, including the
    # held-out one — mild leakage, preserved to keep behaviour unchanged.
    mm = MinMaxScaler()
    data_label_process = mm.fit_transform(data_label)  # normalize labels
    mm_case = MinMaxScaler()
    data_case_process = mm_case.fit_transform(data_case)  # normalize features
    # Last row is the prediction target; everything before it trains.
    test_data_case = data_case_process[len(data_case_process) - 1:]
    train_data_case = data_case_process[0:len(data_case_process) - 1]
    train_data_label = data_label_process[0:len(data_label_process) - 1]
    model = SVR(kernel='rbf')  # support-vector regressor
    model.fit(train_data_case, train_data_label.ravel())
    pre_test = model.predict(test_data_case)  # predict the held-out row
    # Removed: unused training-set prediction, leftover debug print("123"),
    # and the dead commented-out MLPRegressor variant.
    pre = mm.inverse_transform(pre_test.reshape(1, -1))[0]  # undo label scaling
    # int() truncation mirrors the original rounding-down behaviour.
    return [float(data_label[-1][0]), float(int(pre[-1])), model]


def rf_predict():
    """Predict station 103's flow for 2018-06-03 in 5-minute slots, 07:00-23:00.

    Side effects: queries the DB via rf_station_data(), pickles the per-slot
    SVR models to disk, pops up a matplotlib comparison chart, and prints
    MAE/RMSE/MSE. Station id, date, and output path are hard-coded.
    """
    stationID = '103'
    date = '2018-06-03'
    m = datetime.timedelta(hours=7)  # first predicted slot
    station_test_flow = list()
    station_pre_flow = list()
    model_dict = {}  # time-of-day timedelta -> fitted SVR model
    while m <= datetime.timedelta(hours=23):
        print(m)
        station_res = rf_station_data(m, stationID, date)
        model_dict[m] = station_res[2]
        station_test_flow.append(station_res[0])
        station_pre_flow.append(station_res[1])
        m = m + datetime.timedelta(minutes=5)
    # Raw string: the plain literal relied on '\P', '\g', ... not being
    # recognized escapes, which raises SyntaxWarning on Python 3.12+.
    # The resulting path value is byte-identical.
    with open(r'D:\PythonProject\graduationProject\saveModels\RF\station\clf.pkl', 'wb') as f:
        pickle.dump(model_dict, f)

    station_mse = metrics.mean_squared_error(station_test_flow, station_pre_flow)
    station_rmse = np.sqrt(station_mse)
    station_mae = metrics.mean_absolute_error(station_test_flow, station_pre_flow)
    show(station_test_flow,station_pre_flow)
    print(station_mae,station_rmse,station_mse)
if __name__ == '__main__':
    # Train, predict, and persist the per-slot models.
    rf_predict()
    # Raw string avoids invalid-escape SyntaxWarnings ('\P', '\g', ...);
    # the path value is byte-identical to the original literal.
    # NOTE(review): pickle.load is only safe on files we wrote ourselves —
    # never point this at untrusted input.
    with open(r'D:\PythonProject\graduationProject\saveModels\RF\station\clf.pkl', 'rb') as f:
        clf2 = pickle.load(f)
        # Sanity-check the round-trip of the saved model dict.
        print(clf2)
