# -*- coding:utf-8 -*-
import datetime
import time
import traceback
from typing import List

import requests
import schedule
import uvicorn
import torch
# from fastapi import FastAPI, Depends, applications
from fastapi import Depends, FastAPI
from multiprocessing import Process

# from cooling_RL_online_0331_test import train_separated_online
#from cooling_RL_online_1015_test import train_all_online, FIFO_Stack, DQN_Agent, Config
from main_caiji import train_all_online
from model.database_entity import TLiquidCoolOid
from model.model_entity import LiquidCoolingOidInfo, RequestDataModel
# from model.model_test_1 import TestingInfoFsu, TestingInfoCdu, TestingInfo
from model.model_auto_load import TestingInfoFsu, TestingInfoCdu, TestingInfo
from util.config_reader import read_fastapi_config, get_business_config
from util.loguru_util import myLogger
from util.mysql_connnect import get_db, SessionLocal
from util.readexcel_util import readExcel, get_sheet_name
import json
import os

# from apscheduler.schedulers.background import BackgroundScheduler


# def swagger_monkey_patch(*args, **kwargs):
#     return get_swagger_ui_html(
#         *args, **kwargs,
#         swagger_js_url="swaggerUI/swagger-ui-bundle.js",
#         swagger_css_url="swaggerUI/swagger-ui.css",
#         swagger_favicon_url="swaggerUI/favicon-32x32.png"
#     )
#
#
# applications.get_swagger_ui_html = swagger_monkey_patch

# app = FastAPIOffline()
app = FastAPI()
# directory = get_root_path() + '/static/swaggerUI'
# app.mount(path='/swaggerUI', app=StaticFiles(directory=directory), name='swaggerUI')

# scheduler for running background jobs (currently disabled; a child Process
# running new_schedule_data_job is used instead — see __main__)
# scheduler = BackgroundScheduler()
business_config = get_business_config()  # business settings, loaded once at import time
requests.DEFAULT_RETRIES = 1  # fail fast: at most one retry for outgoing HTTP requests

# module-global cursor into the Excel-derived row list consumed by generateRequestData()
data_index: int = 0


@app.get("/ping")
def ping():
    """Liveness probe: always answers with the literal string 'success'."""
    return 'success'


# @app.post("/create")
# def create_info(data: LiquidCoolingOidInfo, db=Depends(get_db)):
#     data_obj = TLiquidCoolOid(**data.dict())
#     db.add(data_obj)
#     db.commit()
#     db.refresh(data_obj)
#     id = data_obj.id
#     return id


@app.get("/query")
def get_info():
    """Return the newest collected row from each monitored table (see get_newest_data)."""
    return get_newest_data()


def get_newest_data():
    """Fetch the most recent row from each of the three monitoring tables.

    Opens one session per database, reads the newest record (ordered by
    ``testing_time`` descending) from the long-run, CDU and FSU tables, and
    collects their ``__repr__`` strings.

    Returns:
        list[str]: ``[longrun_repr, cdu_repr, fsu_repr]`` — each entry is the
        ``__repr__`` of the newest ORM row (presumably a JSON string; the
        original decoded entries 0 and 1 with ``json.loads`` and discarded
        the result, so that dead round-trip was removed here).
    """
    result = []
    locus = SessionLocal("locus").session
    locus_test = SessionLocal("locus_test").session
    try:
        # Long-run data lives in the "locus_test" database.
        longrun_data = locus_test.query(TestingInfo).order_by(TestingInfo.testing_time.desc()).first()
        result.append(longrun_data.__repr__())
        locus_test.commit()

        # CDU data lives in the "locus" database.
        cdu_data = locus.query(TestingInfoCdu).order_by(TestingInfoCdu.testing_time.desc()).first()
        result.append(cdu_data.__repr__())
        locus.commit()

        # FSU data also lives in "locus_test".
        fsu_data = locus_test.query(TestingInfoFsu).order_by(TestingInfoFsu.testing_time.desc()).first()
        result.append(fsu_data.__repr__())
        locus_test.commit()
    finally:
        # Always release both sessions — the original leaked them whenever a
        # query raised before reaching the close() calls.
        locus_test.close()
        locus.close()

    return result


# def get_liquid_cooling_data():
#     myLogger.info("ping")
#     myLogger.info("ping2")


def get_data_timed():
    """Poll the configured data-collection URL once.

    Reads ``url`` from the business config and performs a single GET.

    Returns:
        The JSON-decoded response body on HTTP 200, otherwise ``None``
        (missing config, non-200 status, or request failure — failures are
        logged, never raised).
    """
    if business_config is None or business_config.get("url", None) is None:
        myLogger.error("get url config error")
        return None

    request_url = business_config.get("url")
    try:
        # Bounded timeout: without one a hung endpoint blocks the scheduler
        # thread forever (requests has no default timeout).
        response = requests.get(url=request_url, timeout=30)
        if response.status_code == 200:
            return response.json()
    except Exception:
        myLogger.error(f"request data url failed: {request_url}")
        myLogger.error(traceback.format_exc())
    return None


def get_data_timed_with_args(dataList: List):
    """(legacy) Push one row of Excel-derived set-points to the devices.

    Builds the long-run and CDU payloads from ``dataList`` via
    ``generateRequestData`` and POSTs them to the two device endpoints.

    Args:
        dataList: rows read from the Excel sheet; each row holds the dry-valve,
            pump and CDU set-points in a fixed column order (see
            generateRequestData).

    Returns:
        The long-run payload dict when the ``set_ip`` config exists, otherwise
        ``None``. Request failures are logged and swallowed, not raised.
    """
    # NOTE: the sheet data must be loaded in main() once and passed in here,
    # otherwise sheet names would be re-read on every invocation.
    request_data, request_data_cdu = generateRequestData(dataList)
    if business_config is not None and business_config.get("set_ip", None) is not None:
        set_ip = business_config.get("set_ip")

        set_url = f"http://{set_ip}:10339/testing/setTestingInfo"
        set_url_cdu = f"http://{set_ip}:10310/testing/setTestingInfoCdu"
        myLogger.info(f"schedule_data_job start,request_data: {request_data}, request_data_url: {request_data_cdu}")
        # Routine progress messages: the original logged these at ERROR level;
        # use INFO, consistent with new_get_data_timed_with_args.
        myLogger.info(f"触发定时任务，获取设备数据")
        myLogger.info(request_data)
        try:
            requests.post(url=set_url, json=request_data)
            # "success" here only means the request did not raise; the HTTP
            # status is not inspected (unchanged from the original).
            myLogger.info(f"request success: {set_url}")
        except Exception:
            myLogger.error(f"request data url failed: {set_url}")
            myLogger.error(traceback.format_exc())
        try:
            requests.post(url=set_url_cdu, json=request_data_cdu)
            myLogger.info(f"request success: {set_url_cdu}")
        except Exception:
            myLogger.error(f"request data url failed: {set_url_cdu}")
            myLogger.error(traceback.format_exc())
        return request_data
    else:
        myLogger.error("get url config error")

# TODO: the input parameters of this job still need to be reworked
def new_get_data_timed_with_args():
    """Scheduled job: read newest telemetry, run the online model, push set-points.

    Pulls the latest rows via ``get_newest_data``, feeds them to
    ``train_all_online``, then POSTs the resulting long-run payload (index 0)
    and CDU payload (index 1) to the two device endpoints.

    Returns:
        The payload list produced by ``train_all_online`` when the ``set_ip``
        config exists, otherwise ``None``. Request failures are logged and
        swallowed, not raised.
    """
    myLogger.info(f"##########################任务的开始#################")
    model_input_data = get_newest_data()
    # TODO: adapt this call once the model input format is revised
    request_datas = train_all_online(model_input_data)
    if business_config is not None and business_config.get("set_ip", None) is not None:
        set_ip = business_config.get("set_ip")

        set_url = f"http://{set_ip}:10339/testing/setTestingInfo"
        set_url_cdu = f"http://{set_ip}:10310/testing/setTestingInfoCdu"
        myLogger.info(f"schedule_data_job start,request_data: {request_datas[0]}, request_data_cdu: {request_datas[1]}")
        myLogger.info(f"触发定时任务，设置设备数据")
        myLogger.info(request_datas)
        try:
            requests.post(url=set_url, json=request_datas[0])
            myLogger.info(f"request success: {set_url}")
        except Exception:
            myLogger.error(f"request data url failed: {set_url}")
            myLogger.error(traceback.format_exc())
        try:
            requests.post(url=set_url_cdu, json=request_datas[1])
            myLogger.info(f"request success: {set_url_cdu}")
        except Exception:
            myLogger.error(f"request data url failed: {set_url_cdu}")
            myLogger.error(traceback.format_exc())
        # BUG FIX: the original did `return request_data`, a name that does
        # not exist in this function (the local is `request_datas`), raising
        # NameError on every successful run.
        return request_datas

    else:
        myLogger.error("get url config error")


# def schedule_data_job():
#     """ old 通过读取表格内的数据进行五分钟修改数据"""
#     sheet_name = get_sheet_name()
#     dataList = readExcel(sheet_name)
#     # schedule.every(10).seconds.do(get_data_timed_with_args)
#     schedule.every().hour.at(":00").do(get_data_timed_with_args, dataList=dataList)
#     schedule.every().hour.at(":05").do(get_data_timed_with_args, dataList=dataList)
#     schedule.every().hour.at(":10").do(get_data_timed_with_args, dataList=dataList)
#     schedule.every().hour.at(":15").do(get_data_timed_with_args, dataList=dataList)
#     schedule.every().hour.at(":20").do(get_data_timed_with_args, dataList=dataList)
#     schedule.every().hour.at(":25").do(get_data_timed_with_args, dataList=dataList)
#     schedule.every().hour.at(":30").do(get_data_timed_with_args, dataList=dataList)
#     schedule.every().hour.at(":35").do(get_data_timed_with_args, dataList=dataList)
#     schedule.every().hour.at(":40").do(get_data_timed_with_args, dataList=dataList)
#     schedule.every().hour.at(":45").do(get_data_timed_with_args, dataList=dataList)
#     schedule.every().hour.at(":50").do(get_data_timed_with_args, dataList=dataList)
#     schedule.every().hour.at(":55").do(get_data_timed_with_args, dataList=dataList)
#     # schedule.every(5).minutes.do(get_data_timed_with_args)
#     while True:
#         schedule.run_pending()
#         time.sleep(1)


def new_schedule_data_job():
    """Worker-process entry point: drive the model-based set-point job forever.

    Registers ``new_get_data_timed_with_args`` to run every 10 seconds and
    then runs the ``schedule`` polling loop. Never returns; intended to be
    launched in a child Process (see __main__).
    """
    # NOTE(review): the original declared dryfan/cdu/pump model-file paths and
    # ~50 lines of commented-out DQN-agent loading code here, none of which
    # was used — presumably model loading now happens inside train_all_online
    # (main_caiji). Confirm before relying on it, then keep this lean version.
    schedule.every(10).seconds.do(new_get_data_timed_with_args)
    while True:
        schedule.run_pending()
        time.sleep(1)


# def batch_insert_data(liquidCoolOids: List):
#     """ 向数据库中批量插入数据"""
#     try:
#         db = get_db()
#         db.bulk_insert_mappings(TLiquidCoolOid, liquidCoolOids)
#         db.commit()
#     except Exception as e:
#         myLogger.error(f"batch insert data failed ")
#         myLogger.error(traceback.format_exc())


@app.get("/request")
def request_data():
    """
        Single fetch of the configured collection URL.
    :return: whatever get_data_timed yields (decoded JSON or None)
    """
    return get_data_timed()


@app.post("/request_with_arg")
def request_data_with_arg(request_data: RequestDataModel):
    """
        Single push of caller-supplied set-points to the devices.

    Renamed from ``request_data``: the original second ``def request_data``
    silently shadowed the GET /request handler in the module namespace (the
    HTTP routes still worked because FastAPI registers at decoration time,
    but the name collision was a latent hazard). The route string is
    unchanged, so HTTP callers are unaffected.

    NOTE(review): get_data_timed_with_args indexes its argument like an Excel
    row list, while this passes a RequestDataModel — verify the intended
    payload shape with the caller.
    :return: the payload echoed by get_data_timed_with_args
    """
    response = get_data_timed_with_args(request_data)
    return response
# @app.post("/request_once")
# def request_data_once(request_data: RequestDataModel):
#     long_run_data = {
#         "dryValve1": request_data.dryFan1,
#         "dryValve2": request_data.dryFan2,
#         "dryValve3": request_data.dryFan3,
#         "dryValve4": request_data.dryFan4,
#         "dryValve5": request_data.dryFan5,
#         "pumpPump": request_data.pumpValue,
#         "pumpBypassValve": request_data.pumpBypassValve}
#     cdu_data = {'cduValve1': request_data.cduValve}
#     myLogger.info(f"request_data: {request_data}")
#
#     if business_config is not None and business_config.get("set_ip", None) is not None:
#         set_ip = business_config.get("set_ip")
#
#         set_url = f"http://{set_ip}:10339/testing/setTestingInfo"
#         set_url_cdu = f"http://{set_ip}:10310/testing/setTestingInfoCdu"
#         try:
#             response = requests.post(url=set_url, json=long_run_data)
#             myLogger.info(f"request success: {set_url}")
#             myLogger.info(f"longrun response: {response.content}")
#             # data = response.json()
#             # batch_insert_data(data)
#         except Exception as e:
#             myLogger.error(f"request data url failed: {set_url}")
#             myLogger.error(traceback.format_exc())
#         try:
#             response_cdu = requests.post(url=set_url_cdu, json=cdu_data)
#             myLogger.info(f"request success: {set_url_cdu}")
#             myLogger.info(f"longrun response: {response_cdu.content}")
#
#         except Exception as e:
#             myLogger.error(f"request data url failed: {set_url_cdu}")
#             myLogger.error(traceback.format_exc())
#
#         # try:
#         #     get_data = requests.post(url=set_url_cdu, json=request_data_cdu)
#         #     myLogger.info(f"request success: {set_url_cdu}")
#         #
#         # except Exception as e:
#         #     myLogger.error(f"get data url failed: {set_url_cdu}")
#         #     myLogger.error(traceback.format_exc())
#         return [long_run_data, cdu_data]
#     else:
#         myLogger.error("get url config error")

#


def generateRequestData(dataList):
    """Turn the next Excel row into ``(long-run payload, CDU payload)``.

    Walks ``dataList`` using the module-global cursor ``data_index`` and wraps
    back to the first row after the last one.

    Args:
        dataList: sequence of rows; columns 0-4 are dry valves 1-5, column 5
            the pump, column 6 the pump bypass valve, column 7 the CDU valve.

    Returns:
        Tuple ``(request_data, request_data_cdu)`` of plain dicts, or
        ``(None, None)`` when ``dataList`` is empty.
    """
    global data_index
    if not dataList:
        # Guard: the original recursed forever on an empty list
        # (reset index -> recurse -> reset index -> ...).
        return None, None
    if data_index >= len(dataList):
        # Wrap around iteratively. BUG FIX: the original recursed here but
        # dropped the recursive call's return value, so the caller received
        # a bare None and crashed unpacking it.
        data_index = 0
    data = dataList[data_index]
    request_data = {
        # "time": datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
        "dryValve1": data[0],
        "dryValve2": data[1],
        "dryValve3": data[2],
        "dryValve4": data[3],
        "dryValve5": data[4],
        "pumpPump": data[5],
        "pumpBypassValve": data[6]
    }
    request_data_cdu = {
        "cduValve1": data[7]
    }
    # Routine progress message: was logged at ERROR level in the original.
    myLogger.info(f" data index is : {data_index}")
    data_index += 1
    # The original round-tripped both dicts through json.dumps/json.loads,
    # which is an identity transform for these plain-number payloads.
    return request_data, request_data_cdu


# @app.get("/save")
# def save_data():
#     """
#         批量保存数据
#         :return:
#     """
#     myLogger.info("batch save data")
#     url = business_config.get('url', None)
#     response = requests.get(url=url)
#     data = response.json()
#
#     liquid_cooling_datas = []
#     for i in data:
#         liquid_cooling_datas.append(**LiquidCoolingOidInfo(**i).dict())
#     batch_insert_data(liquid_cooling_datas)
#     pass


if __name__ == "__main__":
    myLogger.info("start service ...")
    # Smoke test for data collection:
    # print(get_data_timed())

    # Smoke test for sending device commands
    fastapi_config = read_fastapi_config()
    # request_data = {
    #     "dryFan1": 30,
    #     "dryFan2": 30,
    #     "dryFan3": 30,
    #     "dryFan4": 30,
    #     "dryFan5": 30,
    #     "pumpValue": 40,
    #     "pumpBypassValve": 50,
    #     "cduValve": 50,
    # }
    # jsonstr = json.dumps(request_data)
    # jsonRe = json.loads(jsonstr)
    # get_data_timed_with_args(jsonRe)
    # Run the model-driven scheduler loop in a child process so it does not
    # block the web server started below.
    Process(target=new_schedule_data_job).start()
    # NOTE(review): app="run:app" assumes this file is named run.py — confirm.
    # NOTE(review): the `debug` keyword was removed from uvicorn.run in newer
    # uvicorn releases — confirm the pinned uvicorn version still accepts it.
    uvicorn.run(app="run:app", host=fastapi_config['ip'], port=fastapi_config['port'], reload=False, debug=False,
                workers=fastapi_config['workers'])
