import datetime
import traceback
import getopt
import sys

import numpy as np
import pandas as pd
import requests
from sqlalchemy import create_engine

from air_web.config.config import config
from air_web.web_flask.common.logger import init_log
from air_web.web_flask.dal.base_dal import EsBaseDal
from air_web.web_flask.tools.big_air import (
    get_city_cycle_coad,
    get_no_excute_cons,
)
from air_web.web_flask.tools.big_screen import ACLoadExcute
from air_web.web_flask.tools.kputil_old import get_baseline_data
from air_web.web_flask.tools.zb_data import ZBData

# Module logger; re-initialized with an explicit web_mode in __main__.
log = init_log(None)

# Multiplier applied to the baseline load (the baseline runs low, so the
# customer asked for a scaling coefficient — see get_on5_alter_low_sum).
baseline_coef = 1
# Today's date string, fixed once at import time.
now = datetime.date.today().strftime("%Y-%m-%d")
# Current year, fixed once at import time.
year = datetime.datetime.now().year
# Elasticsearch access layer built from the configured host.
es_dal = EsBaseDal(config["ES_HOST"])
# Shared SQLAlchemy engine for the MySQL store used by every query below.
connect = create_engine(
    "mysql+pymysql://{}:{}@{}:{}/{}".format(
        config.get("MYSQL_USER"),
        config.get("MYSQL_PASSWORD"),
        config.get("MYSQL_HOST"),
        config.get("MYSQL_PORT"),
        config.get("MYSQL_DB_NAME"),
    )
)


def get_on5_list():
    """Return the org numbers of all top-level (org_level=1) orgs."""
    query = "select org_no from real_org_no where org_level=1 "
    frame = pd.read_sql(query, connect)
    return frame["org_no"].tolist()


def get_type_max_p_kt(day_time, after_day, type_id, kt_field):
    """Fetch 15-minute AC load rows (p_kt_sum) for a single type_id.

    Rows cover [day_time, after_day) for orgs with org_level < 2; the load
    column is aliased to *kt_field* so callers can merge it directly.

    :param day_time: inclusive start date, "%Y-%m-%d".
    :param after_day: exclusive end date, "%Y-%m-%d".
    :param type_id: consumer type id to filter on.
    :param kt_field: output column name for the load values.
    :return: DataFrame with columns orgNo, *kt_field*, data_time.
    """
    query = f"""select org_no as orgNo, p_kt_sum as {kt_field}, data_time 
              from orgno_typeid_15min 
              where data_time >='{day_time}'
                and data_time < '{after_day}'
                and org_no in (select org_no from real_org_no where org_level<2)
                and type_id = {type_id}
           """
    return pd.read_sql(query, connect)


def get_org_load(day_time):
    """Per-org daily max AC load, with business/common loads at the peak time.

    Starts from the all-user daily maximum (type_id=0) and left-joins the
    business (101) and common-use (102) loads sampled at the same timestamp.

    :param day_time: "%Y-%m-%d" date string.
    :return: DataFrame keyed by orgNo (the data_time column is dropped).
    """
    next_day = (
        datetime.datetime.strptime(day_time, "%Y-%m-%d")
        + datetime.timedelta(days=1)
    ).strftime("%Y-%m-%d")

    # Daily all-user maximum plus the timestamp it occurred at.
    query = f"""select org_no as orgNo, max_p_kt as allUserMaxLoad, max_p_kt_time as data_time 
              from orgno_typeid_ymd_stat 
              where data_date='{day_time}'
                and org_no in (select org_no from real_org_no where org_level<2)
                and type_id = 0
                and date_type=1
           """
    merged = pd.read_sql(query, connect)

    # Sub-type loads are joined on (orgNo, data_time) so they reflect the
    # same instant as the all-user peak.
    business = get_type_max_p_kt(day_time, next_day, 101, 'businessMaxLoad')
    common = get_type_max_p_kt(day_time, next_day, 102, 'commonUseMaxLoad')
    merged = merged.merge(business, on=['orgNo', 'data_time'], how='left')
    merged = merged.merge(common, on=['orgNo', 'data_time'], how='left')
    return merged.drop('data_time', axis=1)


def get_org_count(count_field, type_id=None):
    """Count high-voltage consumers (type_code_sort=1) per org.

    Produces one province-wide row (fixed org number 51101) concatenated
    with one row per city (grouped by on5); the count column is renamed
    to *count_field*.

    :param count_field: name to give the count column in the result.
    :param type_id: optional pare_type_id filter; None means no filter.
    :return: DataFrame with columns ``orgNo`` and *count_field*.
    """
    # BUG FIX: compare against None explicitly — the original truthiness
    # test (`if type_id`) would silently drop the filter for type_id == 0,
    # which is a valid type id elsewhere in this module.
    where_sql = f"and pare_type_id={type_id}" if type_id is not None else ""
    # Province total under the fixed org number 51101.
    sql = f"""select 51101 as orgNo, count(1) as count from c_cons 
              where type_code_sort=1 {where_sql}"""
    pro_df = pd.read_sql(sql, connect)
    # Per-city totals, grouped by the city org number (on5).
    sql = f"""select on5 as orgNo, count(1) as count from c_cons 
              where type_code_sort=1 {where_sql} group by on5"""
    on5_df = pd.read_sql(sql, connect)

    all_df = pd.concat([pro_df, on5_df])
    all_df.rename(columns={'count': count_field}, inplace=True)
    return all_df


def get_org_user_count_load(day_time):
    """Assemble per-org user counts and max AC loads for the control API.

    Added for the dispatch/control interface, so only high-voltage
    consumers are counted.

    :param day_time: "%Y-%m-%d" date string.
    :return: list of per-org record dicts (NaN normalized to None).
    """
    # User counts: all users, business (101) and common-use (102).
    frame = get_org_count("allUserCount")
    frame = pd.merge(frame, get_org_count("businessUserCount", 101), on='orgNo')
    frame = pd.merge(frame, get_org_count("commonUserCount", 102), on='orgNo')

    # Maximum air-conditioning load per org.
    frame = pd.merge(frame, get_org_load(day_time), on='orgNo', how="left")

    # No adjustable-capacity data is available; emit explicit nulls.
    for field in ('allUserMaxAdjustMeasure', 'businessMaxAdjustMeasure',
                  'commonUserMaxAdjustMeasure'):
        frame[field] = None

    frame['day'] = day_time
    frame.replace({np.nan: None}, inplace=True)
    return frame.to_dict("records")


def get_baseline_day(day_time):
    """Return the baseline day for *day_time*.

    Current behavior: simply the previous calendar day.

    Original design (now unreachable, kept below): pick a day from last
    month whose province-wide max temperature matches today's — taking the
    nearest higher temperature if no exact match exists (e.g. today is
    32°C and last month never hit 32°C, take the closest value above, 33°C).

    :param day_time: "%Y-%m-%d" date string.
    :return: baseline day as "%Y-%m-%d" (the dead branch could return None).
    """
    baseline_day = (datetime.datetime.strptime(day_time, "%Y-%m-%d") - datetime.timedelta(
        days=1)).strftime('%Y-%m-%d')
    log.info(f"日期:{day_time}，基线日:{baseline_day}")
    return baseline_day

    # NOTE(review): everything below is UNREACHABLE — the early return above
    # short-circuits the temperature-matching algorithm. Presumably kept as a
    # reference for re-enabling it; confirm intent and either restore or
    # delete this dead code.
    aclr_weather_hour = config.get("ACLR_WEATHER_HOUR", "aclr_weather_hour")
    this_month_start = datetime.datetime.strptime(
        day_time, "%Y-%m-%d"
    ).replace(day=1)
    last_month_end = this_month_start - datetime.timedelta(days=1)
    last_month_start = last_month_end.replace(day=1)

    # Collect last month's daily max temperature from Elasticsearch.
    date_list = pd.date_range(
        start=last_month_start, end=last_month_end, freq="1d"
    ).strftime("%Y-%m-%d")
    tmp_list = []
    for date in date_list:
        rules = [
            ("datetime", "query", "=", date),
            ("tmp", "stat", "max", ">", "0"),
        ]

        res = es_dal.get_group_vector(
            rules, aclr_weather_hour, doc_time_field="datetime"
        )
        if len(res) > 0:
            tmp = round(res[0][1], 2)
            tmp_list.append({"date": date, "tmp": tmp})
        else:
            log.warning(f"缺少天气数据:{date}")
    base_tmp_df = pd.DataFrame(tmp_list)

    # Today's max temperature; without it no baseline day can be chosen.
    rules = [
        ("datetime", "query", "=", day_time),
        ("tmp", "stat", "max", ">", "0"),
    ]
    res = es_dal.get_group_vector(
        rules, aclr_weather_hour, doc_time_field="datetime"
    )
    if len(res) == 0:
        log.warning(f"缺少天气数据:{day_time},无法确定基线日")
        return None
    today_tmp = round(res[0][1], 2)

    # Prefer the coolest day that is at least as hot as today; otherwise
    # fall back to the hottest day that is cooler.
    greater_df = base_tmp_df[base_tmp_df["tmp"] >= today_tmp].sort_values(
        "tmp"
    )
    if not greater_df.empty:
        baseline_day = greater_df.iloc[0]["date"]
    else:
        smaller_df = base_tmp_df[base_tmp_df["tmp"] < today_tmp].sort_values(
            "tmp", ascending=False
        )
        baseline_day = smaller_df.iloc[0]["date"]
    log.info(f"日期:{day_time}, 温度：{today_tmp}，基线日:{baseline_day}")
    return baseline_day


def get_on5_alter_low_sum(day_time, baseline_day):
    """Compute average and max load reduction vs. the baseline day.

    Sums the province-wide 15-minute AC load (type_id=0) per quarter-hour
    for both *day_time* and *baseline_day*, then measures the per-quarter
    reduction ``base_load * baseline_coef - real_load``.

    :param day_time: "%Y-%m-%d" day being evaluated.
    :param baseline_day: "%Y-%m-%d" baseline comparison day.
    :return: (average reduction, max reduction) — (0, 0) when there is no
        org list or no query result; max is clamped to [100, 1_000_000] kW.
    """
    on5_list = get_on5_list()
    if not len(on5_list):
        return 0, 0
    # Exclusive end dates for both one-day windows.
    baseline_end_day = (
        datetime.datetime.strptime(baseline_day, "%Y-%m-%d")
        + datetime.timedelta(days=1)
    ).strftime("%Y-%m-%d")
    after_day = (
        datetime.datetime.strptime(day_time, "%Y-%m-%d")
        + datetime.timedelta(days=1)
    ).strftime("%Y-%m-%d")
    # Build an org filter clause ("=" for one org, "in (...)" otherwise).
    if len(on5_list) == 1:
        on5_s = "org_no =  {}".format(on5_list[0])
    else:
        on5_s = "org_no in   ({}) ".format(",".join(map(str, on5_list)))
    # Left-join the real-day and baseline-day per-quarter sums on HH:MM.
    # (%%H:%%i is escaped because pandas/SQLAlchemy treat % as a placeholder.)
    s_sql = f"""   
           SELECT a.*, b.base_load FROM
           ((
                               SELECT
               DATE_FORMAT( data_time, '%%H:%%i' ) quarter,
               sum(p_kt_sum) real_load
           FROM
               orgno_typeid_15min 
           WHERE
                data_time >=  '{day_time}'  and data_time <  '{after_day}' 
               AND type_id = 0 
           AND {on5_s} group by quarter
           ) a
           left join (	SELECT
               DATE_FORMAT( data_time, '%%H:%%i' ) quarter,
               sum(p_kt_sum) base_load
           FROM
               orgno_typeid_15min 
           WHERE
                data_time >=  '{baseline_day}'  and data_time <  '{baseline_end_day}' 
               AND type_id = 0 
           AND {on5_s}  group by quarter) b on a.quarter = b.quarter) 
                   """
    res_df = pd.read_sql(s_sql, connect)
    if not len(res_df):
        return 0, 0
    res_df["base_load"] = (
        res_df["base_load"] * baseline_coef
    )  # the baseline runs low, so the customer asked for a scaling factor
    res_df["kt_low"] = res_df["base_load"] - res_df["real_load"]
    log.debug(res_df[["quarter", "base_load", "real_load", "kt_low"]])
    log.debug(f"get low data {on5_list}")
    ktl = sorted(res_df["kt_low"].to_list())
    # if len(ktl) > 3:
    #     ktl = ktl[:-2]
    avg_ktl = sum(ktl) / len(ktl)
    max_ktl = max(ktl)
    # Customer requirement: the value must be > 0 and at most 1,000,000 kW.
    max_ktl = max(100, min(max_ktl, 1000000))
    return avg_ktl, max_ktl


def get_air_condition_summary(day_time, baseline_day):
    """Build the province-level AC summary payload for the push API.

    Combines the consumer count for the year, today's max 15-minute AC
    load, city bidding/participation stats and the baseline reduction
    figures. Loads and counts are converted to units of 10,000.

    :param day_time: "%Y-%m-%d" day being summarized.
    :param baseline_day: "%Y-%m-%d" baseline day for the reduction calc.
    :return: single-element list with the summary dict, or None if any
        step raised (the exception is logged and swallowed).
    """
    try:
        # Defaults in case the queries return nothing.
        business_data_yes_c = (
            business_cons_max_load
        ) = city_count = join_bid = already_bid = business_cons_count = 0
        before_day = (
            datetime.datetime.strptime(day_time, "%Y-%m-%d")
            + datetime.timedelta(days=-1)
        ).strftime("%Y-%m-%d")
        after_day = (
            datetime.datetime.strptime(day_time, "%Y-%m-%d")
            + datetime.timedelta(days=1)
        ).strftime("%Y-%m-%d")

        # Yearly consumer count for the province org (51101).
        business_cons_count_df = pd.read_sql(
            f"""
        select cons_num as business_cons_count from area_cons_num 
        where org_no = 51101  and year = {year} """,
            connect,
        )
        if len(business_cons_count_df):
            business_cons_count = business_cons_count_df.to_dict("records")[0][
                "business_cons_count"
            ]
        # Today's maximum 15-minute AC load for the province org.
        business_data = pd.read_sql(
            f"""   	
        SELECT
            max(p_kt_sum) business_cons_max_load 
        FROM
            orgno_typeid_15min 
        WHERE
            data_time >= '{day_time}' 
            AND data_time < '{after_day}' 
            AND org_no = 51101 
            AND type_id = 0
                     """,
            connect,
        )

        # NOTE(review): if the table has rows in the window but max() yields
        # NULL, this can become None/NaN and the /10000 below would raise
        # (caught by the broad except). Presumably the data guarantees a
        # value — confirm.
        if len(business_data):
            business_cons_max_load = business_data.to_dict("records")[0][
                "business_cons_max_load"
            ]

        # business_data_yes = pd.read_sql(f"""
        # SELECT
        #     p_kt_sum business_cons_max_load
        # FROM
        #     orgno_typeid_15min
        # WHERE
        #     data_time >= '{day_time}'
        #     AND data_time < '{after_day}'
        #     AND org_no = 51101
        #     AND is_day_max = 1
        #     AND type_id = 0
        #              """, connect)
        #
        # if len(business_data_yes):
        #     business_data_yes_c = business_data_yes.to_dict('records')[0]['business_cons_max_load']

        # City participation stats (bidding totals) for the day.
        city_control_df = pd.read_sql(
            f"""                    
               SELECT
                   '{day_time}' day,
                   sum( t1.n_cons_total ) join_bid,
                   sum( t1.n_cons_st ) already_bid ,
                   count(on5) city_count
               FROM
                   city_control t1 
               WHERE
                   t1.compute_date = '{day_time}' 
               GROUP BY
                   day
               """,
            connect,
        )

        # Average and max reduction against the baseline day.
        on5_alter_low_sum, max_ktl = get_on5_alter_low_sum(
            day_time, baseline_day
        )
        result = {
            # "business_data_yes_c": business_data_yes_c,
            "business_cons_max_load": business_cons_max_load / 10000,
            "cycle_load": on5_alter_low_sum / 10000,
            "max_cycle_load": max_ktl / 10000,
            "day": day_time,
            "business_cons_count": business_cons_count / 10000,
            "city_count": city_count,
            "join_bid": join_bid,
            "already_bid": already_bid,
        }
        # Real stats override the zero defaults when the query had rows.
        if len(city_control_df):
            result.update(city_control_df.to_dict("records")[0])
        return [result]
    except Exception as e:
        # Best-effort: log and return None so a single failed push does not
        # abort the whole run (post_data skips falsy payloads).
        log.error(traceback.format_exc())


def get_city_complete_rate_data(day_time=now):
    """Per-city completion-rate records for *day_time*.

    Joins the city control stats with org names and approval info, then
    strips boilerplate substrings from the city names for display.

    :param day_time: "%Y-%m-%d" date string (defaults to today at import).
    :return: list of record dicts with cleaned ``city_org_name`` values.
    """
    s_sql= f"""
    SELECT
        c.on5 city_org_no,
        r.org_name city_org_name,
        '{day_time}'  day,
        sum(n_cons_total) join_bid,
        sum(n_cons_st) already_bid,
        ( sum(n_cons_st) / sum(n_cons_total) ) city_complete_rate 
    FROM
        city_control c 
    join real_org_no r
      on r.org_no = c.on5
    join city_approval_info cai 
      on c.on5=cai.org_no and c.control_time=cai.control_time
    WHERE
        compute_date = '{day_time}' 
	  group by c.on5,r.org_name
            """
    records = pd.read_sql(s_sql, connect).to_dict("records")
    # Trim display boilerplate from the city names.
    for record in records:
        name = record["city_org_name"]
        for token in ("国网", "供电公司", "新区"):
            name = name.replace(token, "")
        record["city_org_name"] = name
    return records


def post_data(url, data):
    """POST *data* as JSON to *url*, retrying up to 3 times.

    Returns as soon as a 200 response is received. Network errors are
    logged and count as a failed attempt instead of aborting (the original
    let an exception escape, skipping the remaining retries). Falsy
    payloads are skipped up front rather than re-checked per attempt.

    :param url: target endpoint.
    :param data: JSON-serializable payload; falsy values are not sent.
    """
    if not data:
        log.warning(f" data is null : {data}")
        return
    for attempt in range(3):
        log.info(f"start request {url}")
        try:
            resp = requests.post(url=url, json=data)
        except requests.RequestException:
            # Retry on connection/timeout errors instead of propagating.
            log.error(traceback.format_exc())
            continue
        log.info(resp.content)
        if resp.status_code == 200:
            return
    log.warning(f"post_data gave up after 3 attempts: {url}")


# Base URL of the downstream service that receives the pushed payloads.
base_url = "http://25.214.217.116:20081/"

# Logical job name -> full endpoint URL used by the run_* helpers below.
url_map = {
    "air_condition_summary": f"{base_url}airconditionSummary",
    "air_condition_load_execute": f"{base_url}airConditionerLoadExecute",
    "city_complete_rate_data": f"{base_url}cityCompleteRate",
    "baseline_data": f"{base_url}zhixiang/addDataAircondition",
    "no_excute_cons": f"{base_url}NotExecuteCons",
    "no_excute_cons_curve": f"{base_url}notExecuteConsCurve",
    "city_cycle_coad": f"{base_url}CityCycleCoad",
    "org_user_count_load": f"{base_url}airConditionOrgUserCountLoad",
}


def run_airconditionSummary(day_time, baseline_day):
    """Compute and push the air-conditioning summary payload."""
    log.info("start #### air_condition_summary")
    payload = get_air_condition_summary(day_time, baseline_day)
    log.debug(payload)
    post_data(url_map["air_condition_summary"], payload)


def run_city_complete_rate_data(day_time):
    """Compute and push the per-city completion-rate payload."""
    log.info("start #### city_complete_rate_data")
    payload = get_city_complete_rate_data(day_time=day_time)
    log.debug(payload)
    post_data(url_map["city_complete_rate_data"], payload)


def run_baseline_data(day_time, baseline_day):
    """Compute and push the baseline-curve payload."""
    log.info("start #### baseline_data")
    payload = get_baseline_data(day_time, baseline_day, False, False, 1)
    log.debug(payload)
    post_data(url_map["baseline_data"], payload)


def run_city_cycle_coad(day_time, baseline_day):
    """Compute and push the per-city cycle-load payload."""
    log.info("start #### city_cycle_coad")
    payload = get_city_cycle_coad(day_time, baseline_day, baseline_coef)
    log.debug(payload)
    post_data(url_map["city_cycle_coad"], payload)


def run_no_excute_cons(day_time, baseline_day):
    """Push the non-executing consumer list and their load curves."""
    log.info("start #### no_excute_cons")
    cons_payload, curve_payload = get_no_excute_cons(day_time, baseline_day)
    log.debug(cons_payload)
    log.debug(curve_payload)
    post_data(url_map["no_excute_cons"], cons_payload)
    post_data(url_map["no_excute_cons_curve"], curve_payload)


def run_org_user_count_load(day_time):
    """Compute and push the per-org user count / load payload."""
    log.info("start #### org_user_count_load")
    payload = get_org_user_count_load(day_time)
    log.debug(payload)
    post_data(url_map["org_user_count_load"], payload)


def judge_sichuan():
    """Return True when the Sichuan province org (51101) exists in real_org_no."""
    rows = pd.read_sql(
        "select * from real_org_no where org_no = 51101 ", connect
    )
    if len(rows):
        return True
    log.info("没有四川供电所信息,应该非四川,跳过")
    return False

class SichuanData:
    """Driver for the Sichuan big-screen data pushes.

    *only_type* selects what test_main runs: 1 -> baseline push only,
    2 -> ZB jobs only, anything else -> ZB jobs plus the full Sichuan
    realtime pipeline. A None only_type marks the scheduled (non-test)
    entry points, which refresh dates themselves via update_date().
    """

    def __init__(self, only_type=None):
        # Dates are filled lazily by update_date() or test_main().
        self.today_str = None
        self.baseline_day = None
        self.only_type = only_type

    def update_date(self):
        # only_type is never None in test runs; only the scheduled entry
        # points leave it as None, so dates refresh only for those.
        if self.only_type is None:
            self.today_str = datetime.date.today().strftime("%Y-%m-%d")
            self.baseline_day = get_baseline_day(self.today_str)
            log.info(f"today:{self.today_str},baseline_day:{self.baseline_day}")

    def zb_tomorrow_main(self):
        """Run the ZB static (next-day) statistics job."""
        self.update_date()
        ZBData(log, self.today_str).static_main()

    def zb_yesterday_main(self):
        """Run the ZB consumer and temperature index jobs."""
        self.update_date()
        ZBData(log, self.today_str).cons_index_main()
        ZBData(log, self.today_str).tmp_index_main()

    def zb_rt_main(self):
        """Run the ZB realtime job."""
        self.update_date()
        ZBData(log, self.today_str).rt_main()

    def sichuan_rt_main(self):
        """Run the full realtime Sichuan push pipeline."""
        self.update_date()
        if self.baseline_day is None:
            return

        run_airconditionSummary(self.today_str, self.baseline_day)
        run_city_complete_rate_data(self.today_str)
        ACLoadExcute(log, self.today_str).main()
        run_baseline_data(self.today_str, self.baseline_day)
        run_city_cycle_coad(self.today_str, self.baseline_day)
        run_no_excute_cons(self.today_str, self.baseline_day)
        run_org_user_count_load(self.today_str)

    def test_main(self, today_str, baseline_day):
        """Entry point for command-line runs with explicit dates."""
        self.today_str = today_str
        self.baseline_day = baseline_day if baseline_day else get_baseline_day(today_str)

        if self.only_type == 1:
            if self.baseline_day is None:
                return
            # BUG FIX: pass the resolved self.baseline_day — the original
            # forwarded the raw argument, which is None whenever the
            # baseline day had to be derived from today_str above.
            run_baseline_data(self.today_str, self.baseline_day)
        elif self.only_type == 2:
            self.zb_rt_main()
            self.zb_yesterday_main()
            self.zb_tomorrow_main()
        else:
            self.zb_rt_main()
            self.zb_yesterday_main()
            self.zb_tomorrow_main()
            self.sichuan_rt_main()


def sichuan_big_main_yesterday():
    """Push baseline data for yesterday; no-op when no baseline day exists."""
    target_day = (
        datetime.date.today() - datetime.timedelta(days=1)
    ).strftime("%Y-%m-%d")
    base_day = get_baseline_day(target_day)
    if base_day is None:
        return

    run_baseline_data(target_day, base_day)


if __name__ == "__main__":
    # Command-line entry point for ad-hoc/test runs:
    #   -t YYYY-MM-DD  day to compute (default: today)
    #   -b YYYY-MM-DD  baseline day (default: derived from -t)
    #   -z INT         only_type job selector (0 runs everything)
    today_str = datetime.date.today().strftime("%Y-%m-%d")
    baseline_day = None
    only_type = 0
    web_mode = "development"

    opts, args = getopt.getopt(sys.argv[1:], "t:b:z:")
    for opt, val in opts:
        if opt == "-t":
            today_str = val
        elif opt == "-b":
            baseline_day = val
        elif opt == "-z":
            only_type = int(val)

    # Re-initialize the module-level logger with an explicit web mode.
    log = init_log(None, web_mode=web_mode)
    SichuanData(only_type).test_main(today_str, baseline_day)
