import getopt
import sys
import time
import traceback
from collections import ChainMap, defaultdict
from datetime import datetime, timedelta
from multiprocessing import Pool

import pandas as pd
from elasticsearch.exceptions import ConnectionTimeout, NotFoundError

from air_web.config.config import config
from air_web.data_platform import init_db
from air_web.dw.data_mapping import ConsType, DataType
from air_web.web_flask.dal.base_dal import EsBaseDal


class Singleton(type):
    """Metaclass caching one instance per class.

    The cached instance is stored in the name-mangled class attribute
    ``_Singleton__instance``; ``MyEsBaseDal.reset`` relies on that exact
    attribute name to drop the cache, so keep the mangling intact.
    NOTE(review): not lock-protected — concurrent first calls from multiple
    threads could each construct an instance; confirm single-threaded use.
    """

    def __init__(self, *args, **kwargs):
        # Runs once per class created with this metaclass: start uncached.
        self.__instance = None
        super().__init__(*args, **kwargs)

    def __call__(self, *args, **kwargs):
        # Lazily build on first call; afterwards always return the cached
        # instance (the original duplicated the return in both branches).
        if self.__instance is None:
            print(f"create new instance {self.__name__}")
            self.__instance = super().__call__(*args, **kwargs)
        return self.__instance


class MyEsBaseDal(EsBaseDal, metaclass=Singleton):
    """Process-wide singleton Elasticsearch DAL.

    No ``__init__`` override is needed: ``Singleton.__call__`` forwards the
    ``config`` argument straight to ``EsBaseDal.__init__``, so the previous
    pass-through ``__init__`` was redundant and has been dropped.
    """

    def reset(self):
        # Clear the metaclass's cached instance (name-mangled attribute on
        # the class object) so the next MyEsBaseDal(...) builds a fresh DAL.
        # Used by sync_data workers to avoid sharing a forked connection.
        self.__class__._Singleton__instance = None


# Per-index-type mapping: {DataType member -> ES document field holding the
# value}. convert_data_structure emits one 96-slot output row per
# (cons_no, DataType) pair listed under the index type, reading the hit's
# value from the mapped field name.
DATA_MAP = {
    "power": {
        DataType.P_TOTAL: "p_total",
        DataType.P_KT: "p_kt",
        DataType.P_BASE: "p_base",
        DataType.P_STD_LEFT: "p_std_left",
        DataType.P_STD_RIGHT: "p_std_right",
        DataType.P_KT_EST: "p_kt_est",
        DataType.P_BASE_EST: "p_base_est",
    },
    "baseline": {DataType.P_TOTAL: "p_total", DataType.P_KT: "p_kt"},
    "predict": {DataType.P_TOTAL: "p_total", DataType.P_KT: "p_kt"},
}


def convert_data_structure(es_res, start_time, index_type, time_map_dict):
    """Pivot per-timestamp ES hits into per-(cons_no, data_type) 96-slot rows.

    Each output row carries the consumer/org metadata, ``data_date``, a
    ``data_type`` tag and ``p_1``..``p_96`` columns (None for slots with no
    reading that day).

    :param es_res: iterable of ES hit dicts containing ``cons_no``,
        ``data_time``, ``on5``/``on7``/``org_no``/``type_id`` plus the value
        fields named in ``DATA_MAP[index_type]``
    :param start_time: day being synced; only its date part is used
    :param index_type: key into ``DATA_MAP`` ("power"/"baseline"/"predict")
    :param time_map_dict: "HH:MM:SS" -> 1..96 quarter-hour slot mapping
    :return: flat list of row dicts ready for ``update_data_by_id``
    """
    data_date = start_time.strftime("%Y-%m-%d")
    data_dict = defaultdict(dict)
    for r in es_res:
        cons_no = r["cons_no"]
        # The last 8 characters of data_time are the HH:MM:SS part.
        p_field = f"p_{time_map_dict[r['data_time'][-8:]]}"

        if cons_no not in data_dict:
            # First hit for this consumer: seed one skeleton row per
            # data_type, with all 96 slots initialised to None.
            skeleton = {
                "cons_no": cons_no,
                "data_date": data_date,
                "on5": r["on5"],
                "on7": r["on7"],
                "org_no": r["org_no"],
                "type_id": r["type_id"],
            }
            skeleton.update({f"p_{i}": None for i in range(1, 97)})

            for data_type in DATA_MAP[index_type]:
                # Plain dict merge replaces the former dict(ChainMap(...));
                # the copy also prevents rows from aliasing the skeleton.
                data_dict[cons_no][data_type] = {**skeleton, "data_type": data_type}
        for data_type, value_field in DATA_MAP[index_type].items():
            data_dict[cons_no][data_type][p_field] = r[value_field]

    data_list = []
    for cons_dict in data_dict.values():
        data_list.extend(cons_dict.values())
    return data_list


def sync_data(
    start_time, end_time, index_name, save_table_name, index_type, time_map_dict, on7=None
):
    """Pull one (index, time window, on7) slice from ES and upsert into MySQL.

    Runs inside a pool worker: disposes inherited DB connections, queries ES
    (retrying up to 10 times on timeouts), reshapes power-style indexes into
    96-slot rows, then writes via ``update_data_by_id``.

    :param start_time: inclusive window start
    :param end_time: exclusive window end
    :param index_name: ES index to query
    :param save_table_name: MySQL table to write into
    :param index_type: "power"/"baseline"/"predict"/"err"
    :param time_map_dict: "HH:MM:SS" -> 1..96 slot mapping
    :param on7: optional org filter; omitted from the query when falsy
    """
    # sql_engine_cons is a module global created under __main__; dispose()
    # drops pooled connections inherited from the parent process.
    # NOTE(review): assumes fork-started workers — confirm on this platform.
    sql_engine_cons.engine.dispose()
    es_dal = MyEsBaseDal(config["ES_HOST"])
    es_dal.reset()
    # "err" documents are keyed by date only; the others by full timestamp.
    doc_time_field = "data_date" if index_type == "err" else "data_time"

    print(f"start {index_type},{start_time},{on7}")
    t1 = time.time()
    rules = [
        (doc_time_field, "query", ">=", start_time),
        (doc_time_field, "query", "<", end_time),
    ]
    if on7:
        rules.append(("on7", "query", "=", on7))

    es_res = None
    for attempt in range(10):  # retry only on ES connection timeouts
        try:
            es_res = es_dal.ruleng_query(
                rules,
                index_name=index_name,
                doc_time_field=doc_time_field,
                time_out=300,
            )
            break
        except NotFoundError:
            # Missing index is expected for some partitions; nothing to sync.
            print(f"索引不存在:{index_name},{on7},{start_time},{end_time}")
            break
        except ConnectionTimeout:
            print(
                f"search失败次数:{attempt + 1},{index_type},{on7},{start_time},{end_time}"
            )
    if es_res is None:
        # Either the index does not exist or all retries timed out.
        return
    t2 = time.time()

    if index_type in ("power", "baseline", "predict"):
        data_list = convert_data_structure(es_res, start_time, index_type, time_map_dict)
    else:
        # "err" rows are stored as-is (was a manual append loop).
        data_list = list(es_res)
    if not data_list:
        print(f"数据为空,index:{index_name},{start_time},{on7}")
        return
    t3 = time.time()
    sql_engine_cons.update_data_by_id(data_list, save_table_name)
    t4 = time.time()
    print(f"""end {index_type},{start_time},{on7},数据条数:{len(data_list)}, time:{t4 - t1}, es:{t2 - t1},trans:{t3 - t2},mysql:{t4 - t3}""")


class ConsResSync:
    """Drive the ES -> MySQL sync of consumer results over a date window.

    Fans per-(on7, day) ``sync_data`` calls out to a multiprocessing pool.
    """

    def __init__(self, start_date, end_date, process_num):
        """
        :param start_date: window start, or None to derive a default window
            in ``sync_main`` from ``config["real_data_day"]``
        :param end_date: window end, or None (paired with ``start_date``)
        :param process_num: number of pool worker processes
        """
        self.ori_start_date = start_date
        self.ori_end_date = end_date
        self.process_num = process_num

        # Effective window; recomputed in sync_main when ori_start_date is None.
        self.start_date = self.ori_start_date
        self.end_date = self.ori_end_date

        # on7 -> on5 mapping, filled by get_on7_dict().
        self.on7_dict = {}

    def get_index_name(self, index_type, start_time=None, on5=None, on7=None):
        """Return ``(es_index_name, save_table_name)`` for an index type.

        ``es_index_name`` is None when the pieces needed to build it
        (``on5``/``on7`` or ``start_time``) are not supplied, e.g. when the
        caller only wants the table name (see ``delete_data``).
        """
        index_name = None
        save_table_name = None  # fixes the original's "save_tale_name" typo
        if index_type == "power":
            if on5:
                # "cal01" narrows to calculated consumers when the filter is on.
                is_cal = "1" if config["c_cons_filter_is_cal"] else "*"
                index_name = (
                    config["POWERLOAD"]
                    .replace("on5", str(on5))
                    .replace("on7", str(on7))
                    .replace("cal01", is_cal)
                )
            save_table_name = config.get(
                "ACLR_RES_POWER_96", "aclr_res_power_96"
            )
        elif index_type == "baseline":
            if start_time:
                # Baseline indexes are partitioned per day.
                baseline_idx = config.get(
                    "BASELINE_IDX", "aclr_res_baseline-date"
                )
                index_name = baseline_idx.replace(
                    "date", start_time.strftime("%Y.%m.%d")
                )
            save_table_name = config.get(
                "ACLR_RES_BASELINE", "aclr_res_baseline"
            )
        elif index_type == "predict":
            if on5:
                index_name = (
                    f"{config['POWERLOAD_PREDICT_PRE_IDX']}{on5}-{on7}"
                )
            save_table_name = config.get(
                "ACLR_RES_PREDICT_96", "aclr_res_predict_96"
            )
        elif index_type == "err":
            # err documents are mirrored into a table of the same name.
            index_name = config["ACLR_POWER_ERR_DAY_IDX"]
            save_table_name = index_name

        return index_name, save_table_name

    def _run_pool(self, task_args):
        """Run ``sync_data`` once per args tuple on a process pool and wait.

        A failing task logs its full traceback without aborting the tasks
        already submitted; the pool is always closed and joined.
        """
        pro_pool = Pool(self.process_num)
        results = [
            pro_pool.apply_async(func=sync_data, args=args) for args in task_args
        ]
        try:
            for res in results:
                res.get()
        except Exception:  # was a bare except:, which also swallowed SystemExit
            print(traceback.format_exc())

        pro_pool.close()
        pro_pool.join()

    def on7_day_sync(self, index_type, date_list, time_map_dict):
        """Loop over on7 first, then days (index name is fixed per on7)."""
        task_args = []
        for on7, on5 in self.on7_dict.items():
            index_name, save_table_name = self.get_index_name(
                index_type, self.start_date, on5, on7
            )
            for i in range(len(date_list) - 1):
                task_args.append(
                    (
                        date_list[i],
                        date_list[i + 1],
                        index_name,
                        save_table_name,
                        index_type,
                        time_map_dict,
                        on7,
                    )
                )
        self._run_pool(task_args)

    def day_on7_sync(self, index_type, date_list, time_map_dict):
        """Loop over days first, then on7 (index name depends on the day)."""
        task_args = []
        for i in range(len(date_list) - 1):
            start_time = date_list[i]
            end_time = date_list[i + 1]
            for on7, on5 in self.on7_dict.items():
                index_name, save_table_name = self.get_index_name(
                    index_type, start_time, on5, on7
                )
                task_args.append(
                    (
                        start_time,
                        end_time,
                        index_name,
                        save_table_name,
                        index_type,
                        time_map_dict,
                        on7,
                    )
                )
        self._run_pool(task_args)

    @staticmethod
    def get_time_map_dict():
        """Return {"HH:MM:SS": slot} mapping each 15-minute time to 1..96."""
        time_list = pd.date_range('2023-01-01', periods=96, freq="15min").strftime('%H:%M:%S')
        time_df = pd.DataFrame(time_list, columns=["data_time"]).reset_index()
        time_df['index'] = time_df['index'] + 1  # slots are 1-based
        return time_df.set_index('data_time')['index'].to_dict()

    def sync_main(self, index_type, end_days):
        """Sync one index type over the effective date window.

        :param index_type: "power"/"baseline"/"predict"/"err"
        :param end_days: window length in days, used only when no explicit
            start date was given (window then starts
            ``config["real_data_day"]`` days before today)
        """
        time_map_dict = self.get_time_map_dict()

        if self.ori_start_date is None:
            self.start_date = datetime.combine(
                datetime.now().date() - timedelta(config["real_data_day"]),
                datetime.min.time(),
            )
            self.end_date = self.start_date + timedelta(days=end_days)

        print(f"同步数据开始 {index_type}, {self.start_date}, {self.end_date}")
        date_list = pd.date_range(
            start=self.start_date, end=self.end_date, freq="1D"
        )
        # Baseline index names vary per day, so days drive the outer loop.
        if index_type == 'baseline':
            self.day_on7_sync(index_type, date_list, time_map_dict)
        else:
            self.on7_day_sync(index_type, date_list, time_map_dict)

        print(f"同步数据结束 {index_type}, {self.start_date}, {self.end_date}")

    def get_on7_dict(self):
        """Load the distinct on7 -> on5 mapping for syncable consumer types."""
        sql_engine = init_db(config["MYSQL_DB_NAME"])
        sql = f"""select distinct on5, on7 
                 from c_cons 
                 where cons_type in ({ConsType.ORDINARY_USER}, {ConsType.LINE_USER})
                 order by on5"""
        on7_df = sql_engine.query(sql)
        self.on7_dict = (
            on7_df[["on7", "on5"]].set_index("on7")["on5"].to_dict()
        )

    def delete_data(self):
        """Delete rows older than ~3 months from the three result tables."""
        three_month_ago = (
            datetime.now() - timedelta(days=(90 + config["real_data_day"]))
        ).strftime("%Y-%m-%d")
        for index_type in ["power", "baseline", "predict"]:
            _, save_table_name = self.get_index_name(index_type)
            # Both interpolated values are generated locally, not user input.
            sql = f"delete from {save_table_name} where data_date < '{three_month_ago}'"
            sql_engine_cons.engine.execute(sql)
            print(f"已删除{three_month_ago}前的{save_table_name}数据")

    def main(self, index_types, is_delete):
        """Entry point: optional purge, load the on7 map, sync each type."""
        if is_delete:
            self.delete_data()

        self.get_on7_dict()

        print(f"要同步的数据类型:{index_types}")
        if "power" in index_types:
            self.sync_main("power", 1)
        if "baseline" in index_types:
            self.sync_main("baseline", 1)
        if "predict" in index_types:
            self.sync_main("predict", 3)
        if "err" in index_types:
            self.sync_main("err", 1)


if __name__ == "__main__":
    # Module-level engine shared with sync_data / delete_data in the workers.
    sql_engine_cons = init_db(config["MYSQL_CONS_DB_NAME"])

    # Defaults, overridden by the command-line flags parsed below.
    start_date = None
    end_date = None
    process_num = 3
    index_types = []  # any of: power, baseline, predict, err
    is_delete = False

    parsed_opts, _extra_args = getopt.getopt(sys.argv[1:], "s:e:p:i:d")
    for flag, value in parsed_opts:
        if flag == "-s":
            # Start date; when omitted a default window is derived later.
            start_date = datetime.strptime(value, "%Y-%m-%d")
        elif flag == "-e":
            # End date; when omitted a default window is derived later.
            end_date = datetime.strptime(value, "%Y-%m-%d")
        elif flag == "-p":
            # Worker process count for the multiprocessing pool.
            process_num = int(value)
        elif flag == "-i":
            # Required: comma-separated index types; nothing runs without it.
            index_types = value.split(",")
        elif flag == "-d":
            # Presence of -d purges data older than three months first.
            is_delete = True

    ConsResSync(start_date, end_date, process_num).main(index_types, is_delete)
