import getopt
import sys
import time

import pandas as pd

from air_web.data_platform import sql_engine
from air_web.web_flask.dal.base_dal import EsBaseDal


def get_id_df(res):
    """Flatten ES group-vector rows into a (c_id, type_id) DataFrame.

    Each ``row`` in *res* is expected to carry a pair of dicts at
    ``row[0]``: ``row[0][0]["type_id"]`` and ``row[0][1]["c_id"]``
    (shape produced by ``EsBaseDal.get_group_vector`` — TODO confirm).

    Returns an (empty-safe) DataFrame with columns ["c_id", "type_id"].
    """
    # Comprehension instead of the manual append loop (same order, same rows).
    id_list = [[row[0][1]["c_id"], row[0][0]["type_id"]] for row in res]
    return pd.DataFrame(id_list, columns=["c_id", "type_id"])


def get_on7_list():
    """Return the distinct on7 org codes found in the c_cons table."""
    result_df = sql_engine.query("select distinct on7 from c_cons")
    return result_df["on7"].tolist()


def stats(start_date, end_date, powerload_idx):
    """Aggregate consumer/type counts per org level and persist them.

    For every on7 org code, queries ES for (c_id, type_id) vectors within
    [start_date, end_date], then counts consumers and distinct type_ids at
    the on7, on5 and province ("sheng" == "99") levels, and inserts the
    combined result into the ``area_cons_num`` table.

    NOTE(review): relies on the module-level globals ``es_dal`` and
    ``suffix`` that are set in the ``__main__`` block.
    """
    on7_list = get_on7_list()
    cons_df = pd.DataFrame()
    for on7 in on7_list:
        print("proc:", on7)
        on7_str = str(on7)
        # on5 is the 5-character prefix of the on7 code.
        on5_str = on7_str[0:5]

        t1 = time.time()
        rules = [
            ("data_time", "query", ">=", start_date),
            ("data_time", "query", "<=", end_date),
            ("on7", "query", "=", on7),
            ("type_id", "same", ">", "0"),
            ("c_id", "same", ">", "0"),
        ]

        # BUG FIX: a trailing comma previously made index_name a 1-tuple
        # instead of the index-name string (looked accidental — the value
        # was printed and passed straight to get_group_vector).
        index_name = f"{powerload_idx}{suffix}-{on5_str}-{on7_str}"
        res = es_dal.get_group_vector(
            rules, index_name, doc_time_field="data_time"
        )
        print(index_name, res)
        t2 = time.time()
        id_df = get_id_df(res)
        t3 = time.time()
        # ES query time vs. DataFrame build time.
        print(t2 - t1, t3 - t2)
        id_df["on7"] = on7_str
        id_df["on5"] = on5_str
        # Province-level pseudo org code — everything rolls up into "99".
        id_df["sheng"] = "99"

        cons_df = pd.concat([cons_df, id_df])

    # One aggregate frame per org level; all share org_no/cons_num/type_num.
    res_df = pd.concat(
        [
            _level_counts(cons_df, "on7"),
            _level_counts(cons_df, "on5"),
            _level_counts(cons_df, "sheng"),
        ]
    )
    res_df["year"] = start_date[0:4]

    sql_engine.insert_df(res_df, "area_cons_num")


def _level_counts(cons_df, col):
    """Return per-``col`` counts as columns org_no / cons_num / type_num.

    cons_num -- number of rows (consumers) per org code in ``col``
    type_num -- number of distinct (org code, type_id) pairs per org code
    """
    cons_count = cons_df[col].value_counts()
    type_count = cons_df.drop_duplicates([col, "type_id"])[col].value_counts()
    merged = pd.merge(
        cons_count,
        type_count,
        how="left",
        left_index=True,
        right_index=True,
    )
    # pandas suffixes the clashing series names with _x/_y on merge.
    return merged.reset_index().rename(
        columns={
            "index": "org_no",
            f"{col}_x": "cons_num",
            f"{col}_y": "type_num",
        }
    )


def main(powerload_idx, start_date, end_date, process_num=2):
    """Run stats() once per consecutive year-start window in the range.

    ``process_num`` is currently unused: the multiprocessing pool is
    disabled (see the commented-out code below) and windows run serially.
    """
    boundaries = pd.date_range(
        start=start_date, end=end_date, freq="YS"
    ).strftime("%Y-%m-%d")
    # pro_pool = Pool(process_num)
    for win_start, win_end in zip(boundaries, boundaries[1:]):
        stats(win_start, win_end, powerload_idx)

    #     result = pro_pool.apply_async(func=stats, args=(
    #         start_date, end_date, powerload_idx, powerload_area_idx, filter_df))
    #
    # pro_pool.close()
    # pro_pool.join()


if __name__ == "__main__":
    # SECURITY(review): credentials are hard-coded in the default ES host —
    # should come from the environment or a config file.
    es_host = "zxtech:Zxod112_shining10@192.168.83.22:19200"
    start_date = "2023-01-01"
    end_date = "2024-01-01"
    suffix = "_test_inc"
    # Default so running without -n stays valid (matches main()'s default).
    process_num = 2

    # -h es host, -n process count, -s start date, -e end date, --suf suffix
    opts, args = getopt.getopt(sys.argv[1:], "h:n:s:e:", ["suf="])
    for opt, val in opts:
        if opt == "-h":
            es_host = val
        elif opt == "-n":
            process_num = int(val)
        elif opt == "-s":
            start_date = val
        elif opt == "-e":
            end_date = val
        elif opt == "--suf":
            suffix = val

    powerload_idx = "aclr_res_power_96"
    es_dal = EsBaseDal(es_host)

    # BUG FIX: process_num was parsed from -n but never forwarded to main().
    main(powerload_idx, start_date, end_date, process_num)
