from typing import List, Callable

from elasticsearch import helpers, Elasticsearch

from sam.util.logUtil import LoggerUtil
from sam.wrapper.CommonWrapper import catch_and_print_exception


class EsConfig(object):
    """Connection settings for one Elasticsearch index in one environment.

    Builds the `Elasticsearch` client eagerly, so constructing an instance
    opens the connection pool immediately.
    """

    def __init__(self, es_index: str, env: str, id_column: str = "id",
                 hosts: List[str] = None, http_auth: tuple = ('user', 'pwd'),
                 maxsize: int = 25):
        """
        :param es_index: name of the target index.
        :param env: environment label (e.g. "prod"); used for logging only.
        :param id_column: dict key under which bulk payloads carry the doc id.
        :param hosts: ES node addresses; defaults to the historical hard-coded
            host list so existing callers are unaffected.
        :param http_auth: (user, password) tuple for basic auth.
        :param maxsize: per-node connection-pool size.
        """
        self.es_index = es_index
        self.env = env
        # NOTE(review): "host"/"user"/"pwd" look like scrubbed placeholders —
        # confirm real values are injected before relying on these defaults.
        self.client = Elasticsearch(
            hosts if hosts is not None else ["host"],
            maxsize=maxsize,
            http_auth=http_auth)
        self.id_column = id_column


# Module-level config for the production tag index.
# NOTE(review): EsConfig builds an Elasticsearch client in __init__, so this
# opens a connection pool as a side effect of importing this module.
TAG_PROD_ES_CONFIG = EsConfig(es_index="sam-tag-client", env="prod")


class ES(LoggerUtil):
    """Thin wrapper over an Elasticsearch client bound to a single index.

    Provides bulk insert/update/delete helpers (with an `is_only_print_sql`
    dry-run mode that logs actions instead of executing them) and query
    helpers by id, by search, and by scroll.
    """

    def __init__(self, es_config: EsConfig):
        super().__init__(name="es")
        self.index = es_config.es_index
        self.env = es_config.env
        self.client = es_config.client
        self.logger.info(f"当前要执行的 环境是：{self.env}, 索引是 {self.index} ")
        # incremented once per query_by_search call; forwarded to handle_func
        self.query_index = -1
        # default key under which payload dicts carry their document id
        self.id_column = es_config.id_column

    def _resolve_id_column(self, id_column: str) -> str:
        """Fall back to the configured id column when the caller passed None.

        The per-call `id_column` previously hard-defaulted to "id", leaving
        `EsConfig.id_column` dead; passing None (the new default) now uses it.
        Explicit values keep their old meaning.
        """
        return id_column if id_column is not None else self.id_column

    def _collect_or_print(self, new_action: dict, action_list: List[dict],
                          is_only_print_sql: bool, msg_prefix: str):
        """Dry-run: log the action; otherwise queue it for bulk execution."""
        if is_only_print_sql:
            self.logger.info(f"{msg_prefix}: {new_action}")
        else:
            action_list.append(new_action)

    def _run_bulk(self, action_list: List[dict], msg_prefix: str):
        """Execute queued bulk actions, if any, and log the (ok, errors) result."""
        if action_list:
            result = helpers.bulk(self.client, action_list)
            self.logger.info(f"{msg_prefix}: {result}")

    @catch_and_print_exception
    def batch_insert_or_update_by_bulk(self, data_list: List[dict], is_only_print_sql: bool = False,
                                       id_column: str = None):
        """Upsert every dict in `data_list` in one bulk request (_op_type='index')."""
        id_column = self._resolve_id_column(id_column)
        action_list = []
        if data_list:
            for data in data_list:
                new_action = {"_index": self.index, "_id": data[id_column], "_op_type": 'index',
                              "_source": data}
                self._collect_or_print(new_action, action_list, is_only_print_sql,
                                       "将要执行的 批量 插入或更新 语句是")
            self._run_bulk(action_list, "批量执行的 插入或更新 的结果是")

    @catch_and_print_exception
    def batch_part_update_by_bulk(self, data_list: List[dict], is_only_print_sql: bool = False,
                                  id_column: str = None):
        """Partially update documents (_op_type='update', payload under 'doc')."""
        id_column = self._resolve_id_column(id_column)
        action_list = []
        if data_list:
            for data in data_list:
                new_action = {"_index": self.index, "_id": data[id_column],
                              "_op_type": 'update',
                              "doc": data}
                self._collect_or_print(new_action, action_list, is_only_print_sql,
                                       "将要执行的 批量 部分更新 语句是")
            self._run_bulk(action_list, "批量执行的 批量 部分更新 的结果是")

    @catch_and_print_exception
    def batch_insert_by_bulk(self, data_list: List[dict], is_only_print_sql: bool = False,
                             id_column: str = None):
        """Insert-only bulk (_op_type='create').

        Dicts without an id are indexed one by one so Elasticsearch can
        auto-generate the document id (note: those bypass the dry-run flag,
        as in the original implementation).
        """
        id_column = self._resolve_id_column(id_column)
        action_list = []
        if data_list:
            for data in data_list:
                if data.get(id_column):
                    new_action = {"_index": self.index, "_id": data[id_column],
                                  "_op_type": 'create',
                                  "_source": data}
                    self._collect_or_print(new_action, action_list, is_only_print_sql,
                                           "将要执行的 批量 插入 语句是")
                else:
                    # no id supplied: single index call, ES assigns the id
                    self.client.index(index=self.index, body=data)

            self._run_bulk(action_list, "批量执行的 批量 插入 的结果是")

    @catch_and_print_exception
    def batch_delete_by_bulk(self, data_list: List[dict], is_only_print_sql: bool = False,
                             id_column: str = None):
        """Delete documents by id in one bulk request (_op_type='delete')."""
        id_column = self._resolve_id_column(id_column)
        action_list = []
        if data_list:
            for data in data_list:
                new_action = {"_index": self.index, "_id": data[id_column],
                              "_op_type": 'delete'}
                self._collect_or_print(new_action, action_list, is_only_print_sql,
                                       "将要执行的 删除 语句是")
            self._run_bulk(action_list, "批量执行的 删除 的结果是")

    def query_by_id(self, id_str):
        """Fetch one document by id; return the raw get-API response when found.

        BUG FIX: the get API returns a {"found": ..., "_source": ...} document,
        not a search-style {"hits": {...}} envelope, so the previous
        res['hits']['total']['value'] lookup raised KeyError on every call.
        """
        res = self.client.get(index=self.index, id=id_str)
        if res and res.get('found'):
            return res

    def query_by_search(self, query_body: dict, handle_func: Callable[[List[dict], int, list], object] = None):
        """Run one non-scrolling search (size=10000).

        When there are hits, optionally invoke `handle_func(source_list,
        query_index, query_body["_source"])` and return the list of _source
        dicts (previously always returned None — new return value is
        backward compatible).
        """
        self.query_index += 1
        page = self.client.search(index=self.index, size=10000, body=query_body)
        if page and page['hits']['total']['value'] > 0:
            self.logger.info(f"语句: \n {query_body} \n 查询到 有{page['hits']['total']['value']}条记录")
            result_list = [hit["_source"] for hit in page['hits']['hits']]
            if handle_func:
                handle_func(result_list, self.query_index, query_body.get("_source"))
            return result_list

    def query_by_scroll(self, query_body: dict, handle_func: Callable[[List[dict], int, list], object] = None):
        """Stream all matches via the scroll API in pages of 10000.

        `handle_func(source_list, page_index, query_body["_source"])` is
        called once per non-empty page.

        BUG FIXES vs the original:
        - the first page's size was taken from hits.total.value (the total
          match count), not the actual page length;
        - `total` was only accumulated for the first page and only when
          handle_func was given, so the final "total size" log was wrong;
        - the server-side scroll context was never cleared.
        """
        page = self.client.search(
            index=self.index,
            scroll='2m',
            size=10000,
            body=query_body)
        total = 0
        scroll_index = 0
        self.logger.info("Scrolling...")
        sid = page['_scroll_id']
        title = query_body.get("_source")
        result_list = [hit["_source"] for hit in page['hits']['hits']]
        curr_query_size = len(result_list)
        total += curr_query_size
        if handle_func and result_list:
            handle_func(result_list, scroll_index, title)
            self.logger.info(f"current query size: {curr_query_size} ")

        # keep scrolling until a page comes back empty
        while curr_query_size > 0:
            scroll_index += 1
            page = self.client.scroll(scroll_id=sid, scroll='2m')
            sid = page['_scroll_id']
            result_list = [hit["_source"] for hit in page['hits']['hits']]
            curr_query_size = len(result_list)
            total += curr_query_size
            if handle_func and result_list:
                handle_func(result_list, scroll_index, title)
                self.logger.info(f"current query size: {curr_query_size} ")
        self.logger.info(f" total size: {total} ")
        # free the server-side scroll context instead of waiting for the timeout
        self.client.clear_scroll(scroll_id=sid)


class EsUtil(ES):
    """Convenience layer over ES that feeds large result lists to the bulk
    APIs in fixed-size batches.

    The four public batch_* methods were byte-level clones differing only in
    which bulk method they invoked; the shared loop now lives in
    `_run_in_batches`. Public signatures are unchanged except that
    `id_column` defaults to None, meaning "use the configured
    EsConfig.id_column" (identical behavior for the default config).
    """

    def __init__(self, es_config: EsConfig):
        super().__init__(es_config=es_config)

    def _run_in_batches(self, bulk_func: Callable, result_list: List[dict],
                        batch_size: int, is_only_print_sql: bool, id_column: str):
        """Push `result_list` to `bulk_func` in chunks of `batch_size`.

        Mirrors the original for/else shape: a final flush always runs after
        the loop (the bulk methods treat an empty list as a no-op, so a
        trailing empty flush only repeats the progress log, as before).
        """
        if not result_list:
            self.logger.info("没有查询到符合条件的数据")
            return
        # resolve here so this works against both old and new ES bulk defaults
        id_column = id_column if id_column is not None else self.id_column
        num = 0
        chunk = []
        for item in result_list:
            num += 1
            chunk.append(item)
            if num % batch_size == 0:
                bulk_func(chunk, is_only_print_sql=is_only_print_sql, id_column=id_column)
                chunk.clear()
                self.logger.info(f"完成 第 {num} 个 ")
        # flush the remainder (possibly empty, matching the original for/else)
        bulk_func(chunk, is_only_print_sql=is_only_print_sql, id_column=id_column)
        chunk.clear()
        self.logger.info(f"完成 第 {num} 个 ")

    def batch_delete_es(self
                        , result_list: List[dict]
                        , batch_size: int = 5000
                        , is_only_print_sql: bool = False
                        , id_column: str = None
                        ):
        """Delete the documents in `result_list`, `batch_size` at a time."""
        self._run_in_batches(self.batch_delete_by_bulk, result_list,
                             batch_size, is_only_print_sql, id_column)

    def batch_insert_or_update_es(self
                                  , result_list: List[dict]
                                  , is_only_print_sql: bool = False
                                  , batch_size: int = 5000
                                  , id_column: str = None
                                  ):
        """Upsert the documents in `result_list`, `batch_size` at a time.

        Note: parameter order differs from the sibling methods
        (is_only_print_sql before batch_size) — preserved for callers that
        pass positionally.
        """
        self._run_in_batches(self.batch_insert_or_update_by_bulk, result_list,
                             batch_size, is_only_print_sql, id_column)

    def batch_update_es(self
                        , result_list: List[dict]
                        , batch_size: int = 5000
                        , is_only_print_sql: bool = False
                        , id_column: str = None
                        ):
        """Partially update the documents in `result_list`, `batch_size` at a time."""
        self._run_in_batches(self.batch_part_update_by_bulk, result_list,
                             batch_size, is_only_print_sql, id_column)

    def batch_insert_es(self
                        , result_list: List[dict]
                        , batch_size: int = 5000
                        , is_only_print_sql: bool = False
                        , id_column: str = None
                        ):
        """Insert (create-only) the documents in `result_list`, `batch_size` at a time."""
        self._run_in_batches(self.batch_insert_by_bulk, result_list,
                             batch_size, is_only_print_sql, id_column)
