import datetime
import json
import pickle
import random

import numpy
import matplotlib
import numpy as np
import scipy
from sklearn import metrics, ensemble
from sklearn.metrics import mean_absolute_error
from sklearn.neighbors import RadiusNeighborsRegressor
from sklearn.neural_network import MLPRegressor
from sklearn.preprocessing import OneHotEncoder, MinMaxScaler
import pymysql
from django.core import serializers
from django.http import JsonResponse
from django.shortcuts import render
from sklearn.svm import SVR
from sklearn import linear_model
from django.views.decorators.csrf import csrf_exempt

# Create your views here.
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_http_methods
from sklearn.tree import DecisionTreeRegressor

from RailTransitCrowdForecasting.models import Lineflow, Stationflow, Station, Linedayflow, Stationdayflow


# Get the list of distinct line ids
@require_http_methods(["GET"])
def get_line(request, ):
    """Return every distinct line id present in Lineflow.

    Response shape: {"linelist": [{"lineid": <id>}, ...]}, in first-seen order.
    """
    seen = set()
    line_ids = []
    for record in Lineflow.objects.values():
        line_id = record["lineid"]
        if line_id in seen:
            continue
        seen.add(line_id)
        line_ids.append({"lineid": line_id})
    return JsonResponse({"linelist": line_ids})


@require_http_methods(["GET"])
def initial(request, ):
    """Build a cascader-style tree: one node per line, stations as children.

    Response shape:
    {"list": [{"value": line_id, "label": "<n>号线",
               "children": [{"value": station_id, "label": name}, ...]}, ...]}
    """
    nodes_by_line = {}
    ordered_nodes = []
    for st in Station.objects.values():
        line_id = st["lineid_id"]
        child = {"value": st["stationid"], "label": st["stationname"]}
        node = nodes_by_line.get(line_id)
        if node is None:
            # First station seen for this line: create its tree node.
            node = {"value": line_id, "label": str(line_id) + "号线", "children": []}
            nodes_by_line[line_id] = node
            ordered_nodes.append(node)
        node["children"].append(child)
    return JsonResponse({"list": ordered_nodes})


# Fetch historical passenger flow for one day
@require_http_methods(["GET"])
def getFlow(request):
    """Return the recorded line and station flow series for the given date.

    Query params: lineID, stationID, date ("YYYY-MM-DD").
    """
    line_id = request.GET["lineID"]
    station_id = request.GET["stationID"]
    date = request.GET["date"]
    parsed_date = datetime.datetime.strptime(date, "%Y-%m-%d")
    station_name = Station.objects.filter(stationid=station_id).values()[0]["stationname"]
    line_flows = [row["flow"] for row in
                  Linedayflow.objects.filter(lineid=line_id, date=parsed_date).values()]
    station_flows = [row["flow"] for row in
                     Stationdayflow.objects.filter(stationid=station_id, date=parsed_date).values()]
    return JsonResponse({
        "lineCheck": {"lineID": line_id, "lineName": str(line_id) + "号线",
                      "date": date, "flow": line_flows},
        "stationCheck": {"stationID": station_id, "stationName": station_name,
                         "date": date, "flow": station_flows},
    })


# Predict passenger flow
@csrf_exempt
@require_http_methods(["POST"])
def predict(request):
    """Predict station and line passenger flow with each requested model.

    POST body: {"params": {"lineID", "stationID", "date", "methods"}}, where
    "methods" is a bracketed comma-separated string such as '["BPNN","SVR"]'.
    For each method the response carries the predicted 5-minute flow series
    (07:00-23:00) plus MSE/RMSE/MAE against the observed flows; the observed
    series itself is attached once as "testflow".
    """
    params = json.loads(request.body)["params"]
    lineID = params["lineID"]
    stationID = params["stationID"]
    date = params["date"]
    # Strip the surrounding brackets and quotes of the serialized method list.
    methodList = params["methods"][1:-1].replace('"', '').split(',')
    stationName = Station.objects.filter(stationid=stationID).values()[0]["stationname"]

    # The training windows depend only on the time slot, not on the method,
    # so fetch them from the database once instead of once per method.
    slot_data = []
    m = datetime.timedelta(hours=7)
    while m <= datetime.timedelta(hours=23):
        slot_data.append((get_station_data(m, stationID, date),
                          get_line_data(m, lineID, date)))
        m = m + datetime.timedelta(minutes=5)

    # Initialized here so the post-loop "testflow" lookups cannot raise
    # NameError when methodList turns out to be empty.
    station_test_flow = []
    line_test_flow = []
    response = {"station": {}, "line": {}}
    for method in methodList:
        station_pre_flow = []
        line_pre_flow = []
        station_test_flow = []
        line_test_flow = []
        for station_data, line_data in slot_data:
            station_res = train(station_data, method)
            line_res = train(line_data, method)
            station_test_flow.append(station_res[0])
            station_pre_flow.append(station_res[1])
            line_test_flow.append(line_res[0])
            line_pre_flow.append(line_res[1])
        station_mse = metrics.mean_squared_error(station_test_flow, station_pre_flow)
        line_mse = metrics.mean_squared_error(line_test_flow, line_pre_flow)
        response["station"][method] = {
            "preflow": station_pre_flow,
            "mse": int(station_mse),
            "rmse": int(np.sqrt(station_mse)),
            "mae": int(metrics.mean_absolute_error(station_test_flow, station_pre_flow)),
        }
        response["line"][method] = {
            "preflow": line_pre_flow,
            "mse": int(line_mse),
            "rmse": int(np.sqrt(line_mse)),
            "mae": int(metrics.mean_absolute_error(line_test_flow, line_pre_flow)),
        }

    response["station"]["stationName"] = stationName
    response["station"]["stationID"] = stationID
    response["station"]["date"] = date
    response["station"]["testflow"] = station_test_flow

    response["line"]["lineName"] = str(lineID) + "号线"
    response["line"]["lineID"] = lineID
    response["line"]["date"] = date
    response["line"]["testflow"] = line_test_flow
    return JsonResponse(response)


#
# # bpnn预测客流量
# @require_http_methods(["GET"])
# def bpnnpredict(request):
#     # print(request)
#     # print("12345")
#     lineID = request.GET["lineID"]
#     stationID = request.GET["stationID"]
#     date = request.GET["date"]
#     # lineID = '3'
#     # stationID = '303'
#     # date = '2018-06-03'
#     dayType = datetime.datetime.strptime(date, "%Y-%m-%d").weekday() + 1
#     stationName = Station.objects.filter(stationid=stationID).values()[0]["stationname"]
#     m = datetime.timedelta(hours=7)
#     station_test_flow = list()
#     station_pre_flow = list()
#     line_test_flow = list()
#     line_pre_flow = list()
#     while m <= datetime.timedelta(hours=23):
#         station_data = get_station_data(m, stationID, date)
#         line_data = get_line_data(m, lineID, date)
#         station_res = bpnn_train(station_data)
#         line_res = bpnn_train(line_data)
#         station_test_flow.append(station_res[0])
#         station_pre_flow.append(station_res[1])
#         line_test_flow.append(line_res[0])
#         line_pre_flow.append(line_res[1])
#         m = m + datetime.timedelta(minutes=5)
#
#     station_mse = metrics.mean_squared_error(station_test_flow, station_pre_flow)
#     station_rmse = np.sqrt(station_mse)
#     station_mae = metrics.mean_absolute_error(station_test_flow, station_pre_flow)
#     line_mse = metrics.mean_squared_error(line_test_flow, line_pre_flow)
#     line_rmse = np.sqrt(line_mse)
#     line_mae = metrics.mean_absolute_error(line_test_flow, line_pre_flow)
#     response = {"line": {"lineID": lineID, "lineName": str(lineID) + "号线", "date": date,
#                          "testflow": line_test_flow, "preflow": line_pre_flow,
#                          "mse": int(line_mse), "rmse": int(line_rmse), "mae": int(line_mae)},
#                 "station": {"stationID": stationID, "stationName": stationName, "date": date,
#                             "testflow": station_test_flow, "preflow": station_pre_flow,
#                             "mse": int(station_mse), "rmse": int(station_rmse), "mae": int(station_mae)}
#                 }
#
#     return JsonResponse(response)
#
#
# # 随机森林预测客流量
# @require_http_methods(["GET"])
# def rfpredict(request):
#     # print(request)
#     # print("12345")
#     lineID = request.GET["lineID"]
#     stationID = request.GET["stationID"]
#     date = request.GET["date"]
#     # lineID = '3'
#     # stationID = '303'
#     # date = '2018-06-03'
#     dayType = datetime.datetime.strptime(date, "%Y-%m-%d").weekday() + 1
#     stationName = Station.objects.filter(stationid=stationID).values()[0]["stationname"]
#     m = datetime.timedelta(hours=7)
#     station_test_flow = list()
#     station_pre_flow = list()
#     line_test_flow = list()
#     line_pre_flow = list()
#     while m <= datetime.timedelta(hours=23):
#         station_data = get_station_data(m, stationID, date)
#         line_data = get_line_data(m, lineID, date)
#         station_res = rf_train(station_data)
#         line_res = rf_train(line_data)
#         station_test_flow.append(station_res[0])
#         station_pre_flow.append(station_res[1])
#         line_test_flow.append(line_res[0])
#         line_pre_flow.append(line_res[1])
#         m = m + datetime.timedelta(minutes=5)
#
#     station_mse = metrics.mean_squared_error(station_test_flow, station_pre_flow)
#     station_rmse = np.sqrt(station_mse)
#     station_mae = metrics.mean_absolute_error(station_test_flow, station_pre_flow)
#     line_mse = metrics.mean_squared_error(line_test_flow, line_pre_flow)
#     line_rmse = np.sqrt(line_mse)
#     line_mae = metrics.mean_absolute_error(line_test_flow, line_pre_flow)
#     response = {"line": {"lineID": lineID, "lineName": str(lineID) + "号线", "date": date,
#                          "testflow": line_test_flow, "preflow": line_pre_flow,
#                          "mse": int(line_mse), "rmse": int(line_rmse), "mae": int(line_mae)},
#                 "station": {"stationID": stationID, "stationName": stationName, "date": date,
#                             "testflow": station_test_flow, "preflow": station_pre_flow,
#                             "mse": int(station_mse), "rmse": int(station_rmse), "mae": int(station_mae)}
#                 }
#
#     return JsonResponse(response)
#
#
# # # SVR预测客流量
# # @require_http_methods(["GET"])
# # def svrpredict(request):
# #     # print(request)
# #     # print("12345")
# #     lineID = request.GET["lineID"]
# #     stationID = request.GET["stationID"]
# #     date = request.GET["date"]
# #     # lineID = '3'
# #     # stationID = '303'
# #     # date = '2018-06-03'
# #     dayType = datetime.datetime.strptime(date, "%Y-%m-%d").weekday() + 1
# #     stationName = Station.objects.filter(stationid=stationID).values()[0]["stationname"]
# #     m = datetime.timedelta(hours=7)
# #     station_test_flow = list()
# #     station_pre_flow = list()
# #     line_test_flow = list()
# #     line_pre_flow = list()
# #     while m <= datetime.timedelta(hours=23):
# #         print(m)
# #         station_data = get_station_data(m, stationID, date)
# #         line_data = get_line_data(m, lineID, date)
# #         station_res = svr_train(station_data)
# #         line_res = svr_train(line_data)
# #         station_test_flow.append(station_res[0])
# #         station_pre_flow.append(station_res[1])
# #         line_test_flow.append(line_res[0])
# #         line_pre_flow.append(line_res[1])
# #         m = m + datetime.timedelta(minutes=5)
# #
# #     station_mse = metrics.mean_squared_error(station_test_flow, station_pre_flow)
# #     station_rmse = np.sqrt(station_mse)
# #     station_mae = metrics.mean_absolute_error(station_test_flow, station_pre_flow)
# #     line_mse = metrics.mean_squared_error(line_test_flow, line_pre_flow)
# #     line_rmse = np.sqrt(line_mse)
# #     line_mae = metrics.mean_absolute_error(line_test_flow, line_pre_flow)
# #     response = {"line": {"lineID": lineID, "lineName": str(lineID) + "号线", "date": date,
# #                          "testflow": line_test_flow, "preflow": line_pre_flow,
# #                          "mse": int(line_mse), "rmse": int(line_rmse), "mae": int(line_mae)},
# #                 "station": {"stationID": stationID, "stationName": stationName, "date": date,
# #                             "testflow": station_test_flow, "preflow": station_pre_flow,
# #                             "mse": int(station_mse), "rmse": int(station_rmse), "mae": int(station_mae)}
# #                 }
# #
# #     return JsonResponse(response)
#
# # SVR预测客流量
# @require_http_methods(["GET"])
# def svrpredict(request):
#     # print(request)
#     # print("12345")
#     lineID = int(request.GET["lineID"])
#     stationID = request.GET["stationID"]
#     date = request.GET["date"]
#     # lineID = int('3')
#     # stationID = '603'
#     # date = '2018-06-03'
#     day = datetime.datetime.strptime(date, "%Y-%m-%d").weekday() + 1
#     dayType = str(day_type(int(day)))
#     stationName = Station.objects.filter(stationid=stationID).values()[0]["stationname"]
#     m = datetime.timedelta(hours=7)
#     station_test_flow = list()
#     station_pre_flow = list()
#     line_test_flow = list()
#     line_pre_flow = list()
#     with open('D:\PythonProject\graduationProject\saveModels\svr.pkl', 'rb') as f:
#         svr_model = pickle.load(f)
#
#     while m <= datetime.timedelta(hours=23):
#         print(m)
#         line_model = svr_model["line"][lineID][dayType][m]
#         station_model = svr_model["station"][stationID][dayType][m]
#         station_data = get_station_data(m, stationID, date)
#         line_data = get_line_data(m, lineID, date)
#         station_test_flow.append(station_data[-1][-1])
#         line_test_flow.append(line_data[-1][-1])
#         station_res = svr_train(station_data, station_model)
#         line_res = svr_train(line_data, line_model)
#         station_pre_flow.append(station_res)
#         line_pre_flow.append(line_res)
#         m = m + datetime.timedelta(minutes=5)
#     station_mse = metrics.mean_squared_error(station_test_flow, station_pre_flow)
#     station_rmse = np.sqrt(station_mse)
#     station_mae = metrics.mean_absolute_error(station_test_flow, station_pre_flow)
#     line_mse = metrics.mean_squared_error(line_test_flow, line_pre_flow)
#     line_rmse = np.sqrt(line_mse)
#     line_mae = metrics.mean_absolute_error(line_test_flow, line_pre_flow)
#     response = {"line": {"lineID": lineID, "lineName": str(lineID) + "号线", "date": date,
#                          "testflow": line_test_flow, "preflow": line_pre_flow,
#                          "mse": int(line_mse), "rmse": int(line_rmse), "mae": int(line_mae)},
#                 "station": {"stationID": stationID, "stationName": stationName, "date": date,
#                             "testflow": station_test_flow, "preflow": station_pre_flow,
#                             "mse": int(station_mse), "rmse": int(station_rmse), "mae": int(station_mae)}
#                 }
#
#     return JsonResponse(response)


def day_type(day):
    """Classify a weekday number as workday or weekend.

    Args:
        day: weekday number, Monday == 1 ... Sunday == 7.

    Returns:
        0 for a workday (Mon-Fri), 1 otherwise.
    """
    # The original had an unreachable trailing `return 1` after the if/else.
    return 0 if 1 <= day <= 5 else 1


def get_station_data(time, stationID, date):
    """Load 30-minute station-flow windows ending at *time* for training.

    Args:
        time: datetime.timedelta time-of-day marking the end of the window.
        stationID: station id (string or int) to query.
        date: target date string "YYYY-MM-DD"; its own window becomes the
            held-out sample and is appended last.

    Returns:
        A list of 7-element flow lists: one per historical day whose day type
        (workday/weekend) matches *date*, plus the window of *date* itself as
        the final entry.
    """
    # NOTE(review): credentials are hard-coded; should come from settings/env.
    conn = pymysql.connect(host='localhost',  # connection host
                           user='root',  # user name
                           passwd='q19723011',  # password
                           port=3306,  # port, default 3306
                           db='month6',  # database
                           charset='utf8',  # character encoding
                           )
    try:
        cur = conn.cursor()
        # "H:MM:SS" with colons stripped, e.g. 7:30:00 -> 73000, compared numerically.
        start_time = int("".join(str(time - datetime.timedelta(minutes=30)).split(':')[:3]))
        end_time = int("".join(str(time).split(':')[:3]))
        # Parameterized query: never interpolate request-derived values into SQL.
        sql = ("select * from station_time_flow "
               "where time >= %s and time <= %s and stationID = %s")
        cur.execute(sql, (start_time, end_time, int(stationID)))
        datas = list(cur.fetchall())
    finally:
        conn.close()  # also releases the cursor

    ob_date = datetime.datetime.strptime(date, "%Y-%m-%d").date()
    ob_dayType = day_type(ob_date.weekday() + 1)
    dataProcess = []
    ob_data_list = []
    # Rows are grouped 7 per day/window; column 1 is the date, column 3 the flow.
    for i in range(0, len(datas), 7):
        window = [datas[i + j][3] for j in range(7)]
        if datas[i][1] == ob_date:
            ob_data_list = window
        elif day_type(datas[i][1].weekday() + 1) == ob_dayType:
            dataProcess.append(window)
    dataProcess.append(ob_data_list)
    return dataProcess


def get_line_data(time, lineID, date):
    """Load 30-minute line-flow windows ending at *time* for training.

    Args:
        time: datetime.timedelta time-of-day marking the end of the window.
        lineID: line id (string or int) to query.
        date: target date string "YYYY-MM-DD"; its own window becomes the
            held-out sample and is appended last.

    Returns:
        A list of 7-element flow lists: one per historical day whose day type
        (workday/weekend) matches *date*, plus the window of *date* itself as
        the final entry.
    """
    # NOTE(review): credentials are hard-coded; should come from settings/env.
    conn = pymysql.connect(host='localhost',  # connection host
                           user='root',  # user name
                           passwd='q19723011',  # password
                           port=3306,  # port, default 3306
                           db='month6',  # database
                           charset='utf8',  # character encoding
                           )
    try:
        cur = conn.cursor()
        # "H:MM:SS" with colons stripped, e.g. 7:30:00 -> 73000, compared numerically.
        start_time = int("".join(str(time - datetime.timedelta(minutes=30)).split(':')[:3]))
        end_time = int("".join(str(time).split(':')[:3]))
        # Parameterized query: never interpolate request-derived values into SQL.
        sql = ("select * from line_time_flow "
               "where time >= %s and time <= %s and lineID = %s")
        cur.execute(sql, (start_time, end_time, int(lineID)))
        datas = list(cur.fetchall())
    finally:
        conn.close()  # also releases the cursor

    ob_date = datetime.datetime.strptime(date, "%Y-%m-%d").date()
    ob_dayType = day_type(ob_date.weekday() + 1)
    dataProcess = []
    ob_data_list = []
    # Rows are grouped 7 per day/window; column 1 is the date, column 3 the flow.
    for i in range(0, len(datas), 7):
        window = [datas[i + j][3] for j in range(7)]
        if datas[i][1] == ob_date:
            ob_data_list = window
        elif day_type(datas[i][1].weekday() + 1) == ob_dayType:
            dataProcess.append(window)
    dataProcess.append(ob_data_list)
    return dataProcess


# # def bpnn_train(datas):
# #     # 处理数据，划分训练集，测试集，归一化处理，独热编码处理
# #     datas = np.array(datas)
# #     data_case = datas[:, 0:6]  # 获取特征值
# #     data_label = datas[:, 6:7]  # 获取标签
# #     mm = MinMaxScaler()
# #     data_label_process = mm.fit_transform(data_label)  # 对数据归一化处理
# #     mm_case = MinMaxScaler()
# #     data_case_process = mm_case.fit_transform(data_case)  # 对数据归一化处理
# #     test_data_case = data_case_process[len(data_case_process) - 1:]
# #     test_data_label = data_label_process[len(data_label_process) - 1:]
# #     train_data_case = data_case_process[0:len(data_case_process) - 1]
# #     train_data_label = data_label_process[0:len(data_label_process) - 1]
# #     # 训练模型
# #     model = MLPRegressor(hidden_layer_sizes=(7, 8, 8), activation='tanh', solver='adam', max_iter=2000,
# #                          learning_rate='adaptive', learning_rate_init=0.02)  # BP神经网络回归模型
# #     model.fit(train_data_case, train_data_label.ravel())  # 训练模型
# #     pre_train = model.predict(train_data_case)  # 模型训练集预测
# #     pre_test = model.predict(test_data_case)  # 模型测试机预测
# #     pre = mm.inverse_transform(np.append(pre_train, pre_test).reshape(1, -1))[0]  # 反归一化
# #     return [float(data_label[-1][0]), float(int(pre[-1]))]
#
#
# # def svr_train(datas):
# #     # 处理数据，划分训练集，测试集，归一化处理，独热编码处理
# #     datas = np.array(datas)
# #     data_case = datas[:, 0:6]  # 获取特征值
# #     data_label = datas[:, 6:7]  # 获取标签
# #     mm = MinMaxScaler()
# #     data_label_process = mm.fit_transform(data_label)  # 对数据归一化处理
# #     mm_case = MinMaxScaler()
# #     data_case_process = mm_case.fit_transform(data_case)  # 对数据归一化处理
# #     test_data_case = data_case_process[len(data_case_process) - 1:]
# #     test_data_label = data_label_process[len(data_label_process) - 1:]
# #     train_data_case = data_case_process[0:len(data_case_process) - 1]
# #     train_data_label = data_label_process[0:len(data_label_process) - 1]
# #     # 训练模型
# #     model = SVR(kernel='rbf')
# #     model.fit(train_data_case, train_data_label.ravel())
# #     # print("Traing Score:%f" % regr.score(X_train, y_train))
# #     # print("Testing Score:%f" % regr.score(X_test, y_test))
# #     pre_train = model.predict(train_data_case)  # 模型训练集预测
# #     pre_test = model.predict(test_data_case)  # 模型测试机预测
# #
# #     # model = MLPRegressor(hidden_layer_sizes=(7, 8, 8), activation='tanh', solver='adam', max_iter=2000,
# #     #                      learning_rate='adaptive', learning_rate_init=0.02)  # BP神经网络回归模型
# #     # model.fit(train_data_case, train_data_label.ravel())  # 训练模型
# #     # pre_train = model.predict(train_data_case)  # 模型训练集预测
# #     # pre_test = model.predict(test_data_case)  # 模型测试机预测
# #     pre = mm.inverse_transform(np.append(pre_train, pre_test).reshape(1, -1))[0]  # 反归一化
# #     # show(data_label,pre)
# #     return [float(data_label[-1][0]), float(int(pre[-1]))]
# def svr_train(datas, model):
#     # 处理数据，划分训练集，测试集，归一化处理，独热编码处理
#     datas = np.array(datas)
#     data_case = datas[:, 0:6]  # 获取特征值
#     data_label = datas[:, 6:7]  # 获取标签
#     mm = MinMaxScaler()
#     mm.fit_transform(data_label)  # 对数据归一化处理
#     mm_case = MinMaxScaler()
#     data_case_process = mm_case.fit_transform(data_case)  # 对数据归一化处理
#     test_data_case = data_case_process[len(data_case_process) - 1:]
#     # 训练模型
#     pre = model.predict(test_data_case)
#     pre = mm.inverse_transform(pre.reshape(1, -1))[0]  # 反归一化
#     return pre[0]
#
#
# def rf_train(datas):
#     # 处理数据，划分训练集，测试集，归一化处理，独热编码处理
#     datas = np.array(datas)
#     data_case = datas[:, 0:6]  # 获取特征值
#     data_label = datas[:, 6:7]  # 获取标签
#     mm = MinMaxScaler()
#     data_label_process = mm.fit_transform(data_label)  # 对数据归一化处理
#     mm_case = MinMaxScaler()
#     data_case_process = mm_case.fit_transform(data_case)  # 对数据归一化处理
#     test_data_case = data_case_process[len(data_case_process) - 1:]
#     test_data_label = data_label_process[len(data_label_process) - 1:]
#     train_data_case = data_case_process[0:len(data_case_process) - 1]
#     train_data_label = data_label_process[0:len(data_label_process) - 1]
#     # 训练模型
#     regr = ensemble.RandomForestRegressor(max_features=6)
#     regr.fit(train_data_case, train_data_label.ravel())
#     # print("Traing Score:%f" % regr.score(X_train, y_train))
#     # print("Testing Score:%f" % regr.score(X_test, y_test))
#     pre_train = regr.predict(train_data_case)  # 模型训练集预测
#     pre_test = regr.predict(test_data_case)  # 模型测试机预测
#
#     # model = MLPRegressor(hidden_layer_sizes=(7, 8, 8), activation='tanh', solver='adam', max_iter=2000,
#     #                      learning_rate='adaptive', learning_rate_init=0.02)  # BP神经网络回归模型
#     # model.fit(train_data_case, train_data_label.ravel())  # 训练模型
#     # pre_train = model.predict(train_data_case)  # 模型训练集预测
#     # pre_test = model.predict(test_data_case)  # 模型测试机预测
#     pre = mm.inverse_transform(np.append(pre_train, pre_test).reshape(1, -1))[0]  # 反归一化
#     # show(data_label,pre)
#     return [float(data_label[-1][0]), float(int(pre[-1]))]


def train(datas, method):
    """Train the requested regressor and predict the held-out last sample.

    Args:
        datas: list of 7-element rows; columns 0-5 are features, column 6 is
            the label. The final row is the slot being forecast.
        method: one of "BPNN", "RandomForest", "SVR", "LinearRegression",
            "BayesianRidge", "KNN", "RegressionTree".

    Returns:
        [observed_flow, predicted_flow] for the final row, both as floats
        (the prediction truncated to an integer value).

    Raises:
        ValueError: if *method* names no supported model.
    """
    datas = np.array(datas)
    data_case = datas[:, 0:6]   # feature columns
    data_label = datas[:, 6:7]  # label column
    mm = MinMaxScaler()
    data_label_process = mm.fit_transform(data_label)  # normalize labels
    mm_case = MinMaxScaler()
    data_case_process = mm_case.fit_transform(data_case)  # normalize features
    # Hold out the final row (the slot being forecast) as the test sample.
    train_data_case = data_case_process[:-1]
    train_data_label = data_label_process[:-1]
    test_data_case = data_case_process[-1:]

    # Dispatch table; callables so each request trains a fresh model.
    model_factories = {
        "BPNN": lambda: MLPRegressor(hidden_layer_sizes=(100, 100), activation='relu',
                                     solver='adam', max_iter=2000,
                                     learning_rate='adaptive'),
        "RandomForest": lambda: ensemble.RandomForestRegressor(criterion="absolute_error"),
        "SVR": lambda: SVR(kernel='rbf'),
        "LinearRegression": linear_model.LinearRegression,
        "BayesianRidge": linear_model.BayesianRidge,
        "KNN": RadiusNeighborsRegressor,
        "RegressionTree": lambda: DecisionTreeRegressor(criterion="absolute_error"),
    }
    try:
        model = model_factories[method]()
    except KeyError:
        # Previously an unknown method printed "未找到" and then crashed with
        # NameError on the undefined `model`; fail explicitly instead.
        raise ValueError("unknown prediction method: %r" % method) from None

    model.fit(train_data_case, train_data_label.ravel())
    pre_train = model.predict(train_data_case)  # predictions on training rows
    pre_test = model.predict(test_data_case)    # prediction on held-out row
    # Undo the label normalization to get flows back in original units.
    pre = mm.inverse_transform(np.append(pre_train, pre_test).reshape(1, -1))[0]
    return [float(data_label[-1][0]), float(int(pre[-1]))]
