from abc import abstractmethod
from typing import Callable, List

from sam.util.dbUtil import DB, parse_query_result, DEFAULT_PROCESS_DB_CONNECTION_CONFIG
from sam.util.fileUtil2 import write_file_quick
from sam.wrapper.CommonWrapper import catch_and_print_exception


class Db2File(DB):
    """Export the result set of a raw SQL query to a file, optionally split into batches."""

    def __init__(self, config=DEFAULT_PROCESS_DB_CONNECTION_CONFIG):
        super().__init__(config=config)

    @catch_and_print_exception
    def export_query(self, sql: str, export_name: str = "query_db_export", file_type: str = "csv", batch_size: int = 0,
                     column: list = None, result_formatter: Callable[[list, list], List[dict]] = None):
        """Run ``sql`` and write the whole result set to one or more files.

        :param sql: SQL query to execute.
        :param export_name: base name of the exported file(s).
        :param file_type: file extension/format forwarded to ``write_file_quick``.
        :param batch_size: when > 0, split the rows into files of at most this many
            rows, each named ``{export_name}-(start-end)``; 0 writes a single file.
        :param column: optional header row inserted at the top of every written file.
        :param result_formatter: optional row post-processor forwarded to
            ``parse_query_result``.
        """
        self.cursor.execute(sql)
        query_result = self.cursor.fetchall()
        export_result = parse_query_result(query_result, datetime_formatter="str", optional="list",
                                           result_formatter=result_formatter)
        if not export_result:
            self.logger.info(f"{export_name}, 查询结果为空")
            return
        if not batch_size:
            if column:
                export_result.insert(0, column)
            write_file_quick(export_result, export_name, file_type=file_type)
            return
        total_size = len(export_result)
        # One slice per batch; the final bound is clamped to total_size so the
        # file-name suffix always reflects the real row range.
        for offset in range(0, total_size, batch_size):
            next_offset = min(offset + batch_size, total_size)
            _export_result = export_result[offset:next_offset]
            if column:
                # The slice is a fresh list, so the header insert never mutates
                # the full result set.
                _export_result.insert(0, column)
            write_file_quick(_export_result, f"{export_name}-({offset}-{next_offset})", file_type=file_type)


class Db2File2(DB):
    """Stream table rows out of the database batch by batch and export them to a file.

    Subclasses implement ``_handle_one_generator_return_data`` to turn each raw
    batch into a list of dicts keyed by the export column names.
    """

    def __init__(self, config=DEFAULT_PROCESS_DB_CONNECTION_CONFIG):
        super().__init__(config=config)
        # Progress counters maintained while the batch generator is consumed.
        self.current_loop_index = 0
        self.total_loop_num = 0

    def _get_db_data_by_generator(self
                                  , table_name
                                  , where: str = None
                                  , column: list = None
                                  , column_str: str = None
                                  , batch_size: int = 10000
                                  , optional: str = "list"
                                  , distinct: bool = False
                                  , datetime_formatter: str = None
                                  , result_formatter=None
                                  , is_only_print_sql: bool = False
                                  , is_fixed_start: bool = False
                                  ):
        """Yield query results ``batch_size`` rows at a time.

        Sets ``self.total_loop_num`` before the first yield and advances
        ``self.current_loop_index`` after each batch has been consumed, so
        while batch N is being processed the index still reads N (0-based).

        :param is_fixed_start: when True, every batch is read from offset 0.
            Intended for queries whose matching rows shrink as they are
            processed (e.g. a status column gets updated); a moving offset
            would then skip rows.
        :raises RuntimeWarning: when ``where`` filters on ``status`` but
            ``is_fixed_start`` is False — paging would be unreliable.
        """
        if where and "status" in where and not is_fixed_start:
            msg = "查询语句中使用了状态条件，但是没有查询固定开始位置"
            raise RuntimeWarning(msg)

        total_count = self.count(table_name, where=where, column=column, column_str=column_str, distinct=distinct)
        # Ceiling division: number of batches needed to cover total_count rows.
        self.total_loop_num = (total_count + batch_size - 1) // batch_size
        self.logger.info(f"从数据库中查询到 共有 {total_count} 条数据要处理")

        for batch_index, offset in enumerate(range(0, total_count, batch_size)):
            yield self.query(
                table_name
                , where=where
                , column=column
                , column_str=column_str
                , start=0 if is_fixed_start else offset
                , limit=batch_size
                , optional=optional
                , distinct=distinct
                , datetime_formatter=datetime_formatter
                , result_formatter=result_formatter
                , is_only_print_sql=is_only_print_sql
            )
            # Executed when the consumer asks for the next batch.
            self.current_loop_index = batch_index + 1

    def batch_export_db_data_by_generator(self
                                          , table_name: str
                                          , where: str = None
                                          , column: list = None
                                          , column_str: str = None
                                          , batch_size: int = 10000
                                          , distinct: bool = False
                                          , datetime_formatter: str = None
                                          , optional: str = "list"
                                          , is_fixed_start: bool = False
                                          , export_name: str = "query_db_export"
                                          , file_type: str = "csv"
                                          , export_column_str: str = None
                                          ):
        """Export a table (or filtered subset) to a file, one appended batch at a time.

        Each batch is passed through ``_handle_one_generator_return_data`` and
        appended to the export file; the first batch is prefixed with a header
        row derived from ``export_column_str`` (falling back to ``column_str``).

        :raises ValueError: when neither ``export_column_str`` nor
            ``column_str`` is provided, since no header can be built.
        """
        loop = self._get_db_data_by_generator(
            table_name, where=where, column=column, column_str=column_str, batch_size=batch_size,
            distinct=distinct, optional=optional, is_fixed_start=is_fixed_start
            , datetime_formatter=datetime_formatter
        )
        export_column_str = export_column_str if export_column_str else column_str
        if not export_column_str:
            # Fail loudly instead of the opaque AttributeError on None.split().
            raise ValueError("export_column_str or column_str must be provided")
        # Reduce "expr as alias" to the alias so header names match the dict
        # keys produced by _handle_one_generator_return_data.
        export_column_list = [col.split(" as ")[1].strip() if " as " in col else col.strip()
                              for col in export_column_str.split(",")]
        for one_generator_return_list in loop:
            handle_result_dict_list = self._handle_one_generator_return_data(one_generator_return_list)
            export_list = self.assemble_export_list(export_column_list, handle_result_dict_list)
            write_file_quick(export_list, export_name, optional="append", file_type=file_type)
            self.logger.info(f"共 {self.total_loop_num} 个批次, 当前是第 {self.current_loop_index} 个批次")

    def assemble_export_list(self, export_column_list: list, handle_result_dict_list: list) -> list:
        """Convert dict rows into list rows ordered by ``export_column_list``.

        The header row is prepended only while the first batch is being
        processed (``self.current_loop_index == 0``); missing keys become "".
        """
        export_list = []
        if self.current_loop_index == 0:
            export_list.append(export_column_list)
        for handle_result_dict in handle_result_dict_list:
            export_list.append([handle_result_dict.get(col, "") for col in export_column_list])
        return export_list

    @abstractmethod
    def _handle_one_generator_return_data(self, one_generator_return_list) -> list:
        """Transform one raw batch into a list of dicts keyed by export column names.

        NOTE(review): @abstractmethod is only enforced when the metaclass is
        ABCMeta — confirm DB's metaclass; otherwise this decorator is
        documentation only.
        """
        pass
