import ctypes
import getopt
import os
import sys
import traceback
from collections import Counter
from datetime import date, datetime, timedelta

import pandas as pd
from es_pandas import es_pandas

from air_web.config.config import config
from air_web.data_platform import mysql_con, sql_engine
from air_web.dw.logger import init_log
from air_web.web_flask.dal.base_dal import EsBaseDal
# Ensure the log directory exists. makedirs(..., exist_ok=True) avoids the
# exists()/mkdir() check-then-create race and also creates missing parent
# directories (plain os.mkdir would raise if /home/zshield/logs were absent).
os.makedirs("/home/zshield/logs/dw/", exist_ok=True)
log = init_log("/home/zshield/logs/dw/")


def get_all_index(input_index):
    """Return the names of every ES index matching the *input_index* pattern.

    Relies on the module-level ``es`` client created in ``__main__``.
    """
    cat_rows = es.cat.indices(index=input_index, format="json")
    names = [row["index"] for row in cat_rows]
    print(f"es:{config['ES_HOST']},获取到相关索引数量:{len(names)},退出")
    return names


def get_es_data(source_list, index_name, start_day, end_day):
    """Scan *index_name* for docs in [start_day, end_day) and return a DataFrame.

    :param source_list: field names to fetch (ES ``_source`` filter)
    :param index_name: index (or pattern) to scan
    :param start_day: inclusive lower bound on ``data_time`` (skipped if falsy)
    :param end_day: exclusive upper bound on ``data_time`` (skipped if falsy)
    :return: DataFrame of the ``_source`` docs; empty DataFrame when nothing
        matched or on any error.
    """
    try:
        query = {
            "query": {
                "bool": {"must": []},
            },
            "_source": source_list,
        }

        # Add the optional time-range filters.
        if start_day:
            query["query"]["bool"]["must"].append(
                {"range": {"data_time": {"gte": start_day}}}
            )

        if end_day:
            query["query"]["bool"]["must"].append(
                {"range": {"data_time": {"lt": end_day}}}
            )
        # NOTE(review): ctypes.CDLL returns a C-function handle, not a Python
        # module — attribute access like ``.helpers.scan`` below only works if
        # this .so is actually loaded some other way; confirm against runtime.
        my_lib = ctypes.CDLL('air_web.ruleng.es.cpython-38-x86_64-linux-gnu.so')
        # Execute the scrolled scan query.
        res = my_lib.helpers.scan(client=es, index=index_name, query=query)
        df = pd.DataFrame(list(res))
        # An empty scan yields a DataFrame with no "_source" column; return
        # empty explicitly instead of letting KeyError fall into the handler.
        if df.empty or "_source" not in df.columns:
            return pd.DataFrame()
        return pd.DataFrame(df["_source"].tolist())
    except Exception:
        # format_exc() prints the actual exception traceback; the previous
        # extract_stack() only showed the call stack at this point.
        print(traceback.format_exc())
        return pd.DataFrame()


def get_mysql_data(on5, on7, start_day, end_day, save_tables):
    """Fetch previously-synced rows for one (on5, on7) pair from MySQL.

    :param on5: 5-level org code filter
    :param on7: 7-level org code filter
    :param start_day: inclusive lower bound on data_time
    :param end_day: exclusive upper bound on data_time
    :param save_tables: table name to read from
    :return: tuple of (list of data_time values in data_time order, raw rows)
    """
    # NOTE(review): SQL built by f-string interpolation — injection risk if
    # any argument ever comes from untrusted input; prefer parameterized
    # queries if mysql_con.get supports placeholders. TODO confirm API.
    sql = f"""select is_day_max, data_time, type_id, on5, p_total, on7, cons_no, p_kt  
              from {save_tables} 
              where on5={on5} and on7 ={on7} 
                and data_time >= '{start_day}' 
                and data_time < '{end_day}' order by data_time"""
    result = mysql_con.get(sql)
    # data_time values in ORDER BY order, used by main() for ES/MySQL diffing
    sql_data_time = [i.get("data_time") for i in result]

    return sql_data_time, result


def main(input_index, out_table, start_date, end_date, key_list):
    """Compare ES data against MySQL per index and log any missing timestamps.

    :param input_index: ES index name/pattern to enumerate
    :param out_table: MySQL table the ES data was written to
    :param start_date: inclusive lower bound on data_time
    :param end_date: exclusive upper bound on data_time
    :param key_list: ES _source fields to fetch
    """
    try:
        print(f"执行多索引导入mysql， 索引前缀：{input_index}，写入表：{out_table}")
        index_names = get_all_index(input_index)  # 获取能匹配到的所有索引
        # Put on5-level indexes (suffix 9999999) first.
        sort_index_names = sorted(
            index_names, key=lambda x: not str(x).endswith("9999999")
        )
        for search_index in sort_index_names[:5]:  # 遍历索引+按时间段划分子任务
            # 设置 -o 参数时 非on5的跳过
            if only_on5 and not str(search_index).endswith("9999999"):
                continue
            result = get_es_data(key_list, search_index, start_date, end_date)
            if result.empty:
                continue
            result.drop_duplicates(
                subset=["data_time", "cons_no"], inplace=True
            )
            result.sort_values(["data_time"], inplace=True)
            # Each index holds a single (on5, on7) pair; take the first value.
            on5 = (result["on5"].drop_duplicates().tolist())[0]
            on7 = (result["on7"].drop_duplicates().tolist())[0]
            es_data_time = result["data_time"].map(str).tolist()
            sql_data_time, sql_result = get_mysql_data(
                on5, on7, start_date, end_date, out_table
            )
            print(f"es的数据条数{len(es_data_time)}, 数据库的数据条数：{len(sql_data_time)}")
            # Multiset difference via Counter: O(n) instead of the previous
            # O(n^2) list.remove() loop, and — unlike remove() — it does not
            # raise ValueError (killing the whole run) when MySQL holds a
            # timestamp that ES lacks.
            missing = list(
                (Counter(es_data_time) - Counter(sql_data_time)).elements()
            )
            if missing or len(sql_data_time) != len(es_data_time):
                log.info(
                    f"存在数据缺失,on5,on7为{on5, on7}，缺失的数据时间点:,{missing}"
                )
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt pass.
        print(traceback.format_exc())


def get_region_df():
    """Return distinct region/org rows (org_level < 3) from real_org_no."""
    # Plain literal: the previous f-prefix had no placeholders (ruff F541).
    region_sql = "select Distinct org_no, org_name, ad_org_name, p_org_no from real_org_no where org_level <3"
    return sql_engine.query(region_sql)


if __name__ == "__main__":
    # NOTE(review): ctypes.CDLL handles don't expose Python classes such as
    # zElasticsearch — confirm how this compiled module is really loaded.
    my_lib = ctypes.CDLL('air_web.ruleng.es.cpython-38-x86_64-linux-gnu.so')
    es = my_lib.zElasticsearch(config["ES_HOST"])
    es_dal = EsBaseDal(config["ES_HOST"])
    ep = es_pandas(config["ES_HOST"])
    predict_index_name = config["POWERLOAD_PREDICT_VIRTUAL_IDX"]
    predict_save_table = config["POWERLOAD_PREDICT_VIRTUAL_TABLE"]
    # 仅同步on5级别的即 on7 == 9999999
    only_on5 = False
    # 开始时间或结束时间不指定时，全时间同步
    # Defaults: yesterday .. today (half-open interval).
    start_date = (date.today() - timedelta(days=1)).strftime("%Y-%m-%d")
    end_date = date.today().strftime("%Y-%m-%d")
    # Bug fix: "-e" needs a trailing colon too, otherwise getopt treats it as
    # a flag and its value was silently dropped (val == "").
    opts, args = getopt.getopt(sys.argv[1:], "s:e:")
    for opt, val in opts:
        if opt == "-s":
            start_date = val
        elif opt == "-e":
            end_date = val
    # ES _source fields to pull for the consistency check.
    key_list = [
        "on5",
        "on7",
        "type_id",
        "cons_no",
        "data_time",
        "p_total",
        "p_kt",
        "is_day_max",
    ]
    main(
        predict_index_name, predict_save_table, start_date, end_date, key_list
    )
