from abc import ABC, abstractmethod
import argparse
from datetime import datetime
from enum import StrEnum
import logging
import multiprocessing
import re
import sys
import time
import traceback

import polars as pl

from columnsmgr import ColumnsMgr
from config import Config
from constant import Constant
from database import DatabaseFactory
from password import Password

# yapf --style='{column_limit:150}' .\demo3.py -i

# Logging configuration: timestamped INFO-level messages.
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')

# Sync strategy: defines how data is synchronized (replace, merge, delete).
class SyncStrategy(ABC):
    """Abstract base class for all synchronization strategies.

    Resolves table names and row filters from the per-table data config
    (expanding the ``{cur_date}`` placeholder) and opens a connection to
    the target database. Subclasses implement :meth:`sync`.
    """

    def __init__(self, seq_no, cur_date, basic_cfg, data_cfg):
        self.seq_no = seq_no
        self.cur_date = cur_date
        self.basic_cfg = basic_cfg
        self.data_cfg = data_cfg
        self.src_table = data_cfg.get(Constant.SRC_TABLE, '')
        self.tgt_table = data_cfg.get(Constant.TGT_TABLE, '')
        # Expand the {cur_date} placeholder in both filters.
        self.src_filter = data_cfg.get(Constant.SRC_TABLE_FILTER, '')
        self.src_filter = self.src_filter.replace('{cur_date}', self.cur_date)
        self.tgt_filter = data_cfg.get(Constant.TGT_TABLE_FILTER, '')
        self.tgt_filter = self.tgt_filter.replace('{cur_date}', self.cur_date)
        self.conv_columns = data_cfg.get(Constant.CONV_COLUMNS, '')
        self.custom_statement = data_cfg.get(Constant.CUSTOM_SYNC_RULE, '')
        self.source_db = None
        self.target_db = DatabaseFactory.create_database(basic_cfg[Constant.TGT_DB_TYPE], basic_cfg[Constant.TGT_DB_USER],
                                                         basic_cfg[Constant.TGT_DB_PASSWORD], basic_cfg[Constant.TGT_DB_ADDR],
                                                         basic_cfg[Constant.TGT_DB_CONN_STR])
        self.target_db.connect()

    @abstractmethod
    def sync(self):
        """Perform the synchronization. Implemented by subclasses."""
        pass

    def destroy(self):
        """Disconnect from the source and target databases (idempotent)."""
        if self.source_db:
            self.source_db.disconnect()
            self.source_db = None
        if self.target_db:
            self.target_db.disconnect()
            self.target_db = None

    @staticmethod
    def _gen_delete_statement(tgt_table, tgt_filter=None):
        """Generate a SQL statement that removes data from the target table.

        Bug fix: the original definition was missing ``self``/``@staticmethod``,
        so the call ``self._gen_delete_statement(table, filter)`` passed three
        positional arguments to a two-parameter function and raised TypeError.

        Args:
            tgt_table: target table name.
            tgt_filter: optional WHERE condition selecting rows to delete.
                When empty or None a TRUNCATE statement is produced instead
                (warning: TRUNCATE removes every row in the table).

        Returns:
            dict with keys ``success``, ``error`` and ``statement``.
        """
        # Parameter validation.
        if not tgt_table:
            return {"success": False, "error": "表名不能为空", "statement": None}

        # No filter: truncate — WHERE conditions are not supported by TRUNCATE.
        if not tgt_filter:
            return {"success": True, "error": None, "statement": f"TRUNCATE TABLE {tgt_table}"}

        # Filtered delete, terminated with a semicolon on its own line.
        delete_parts = [f"DELETE FROM {tgt_table}", f"WHERE {tgt_filter}", ";"]
        statement = "\n".join(delete_parts)
        return {"success": True, "error": None, "statement": statement}


# Replace strategy via database link.
class ReplaceViaDBLink(SyncStrategy):
    """Replaces the target table content with rows pulled from the source
    table through a DB link: a DELETE/TRUNCATE statement followed by an
    INSERT ... SELECT over the link.
    """

    def __init__(self, seq_no, cur_date, basic_cfg, data_cfg, src_cols, src_uk_cols, tgt_cols, tgt_uk_cols):
        super().__init__(seq_no, cur_date, basic_cfg, data_cfg)
        self.dblink = data_cfg.get(Constant.DB_LINK)
        self.src_cols = src_cols
        self.src_uk_cols = src_uk_cols
        self.tgt_cols = tgt_cols
        self.tgt_uk_cols = tgt_uk_cols

    def sync(self):
        """Build the combined delete + insert statement for the replacement.

        Returns:
            dict with ``success``/``error``/``statement`` keys; ``statement``
            contains the delete and insert statements joined by a newline.

        Raises:
            Exception: if the delete statement could not be generated.
        """
        result = self._gen_delete_statement(self.tgt_table, self.tgt_filter)
        if not result['success']:
            raise Exception(f"生成删除语句失败：{result['error']}")
        delete_statement = result['statement']

        result = self.__gen_insert_statement(self.tgt_table, self.src_table, self.tgt_cols, self.tgt_uk_cols,
                                             self.src_cols, self.src_uk_cols, self.src_filter, self.dblink)
        if not result['success']:
            return result
        insert_statement = result['statement']
        # NOTE(review): the original had a large unreachable section after this
        # return (a PL/SQL anonymous-block wrapper built via a local
        # replace_semicolon helper); that dead code has been removed.
        return {"success": True, "error": None, "statement": delete_statement + '\n' + insert_statement}

    @staticmethod
    def __gen_insert_statement(tgt_table, src_table, tgt_all_cols, tgt_uk_cols, src_all_cols, src_uk_cols, src_filter=None, dblink=None):
        """Generate a SQL ``INSERT ... SELECT`` statement.

        Bug fix: the original definition was missing ``self``/``@staticmethod``,
        so the call at the top of :meth:`sync` passed nine positional arguments
        to an eight-parameter function and raised TypeError.

        Args:
            tgt_table: target table name.
            src_table: source table name.
            tgt_all_cols: column names of the target table.
            tgt_uk_cols: unique-key column names of the target table.
            src_all_cols: column names of the source table.
            src_uk_cols: unique-key column names of the source table.
            src_filter: optional WHERE condition filtering the source rows.
            dblink: optional DB link name when the source table is remote.

        Returns:
            dict with keys ``success``, ``error`` and ``statement``.
        """
        # Parameter validation.
        if not all([tgt_table, src_table, tgt_all_cols, src_all_cols]):
            return {"success": False, "error": "表名和列名列表不能为空", "statement": None}

        # Handle missing unique keys.
        if not tgt_uk_cols and not src_uk_cols:
            # When neither table declares a unique key, treat all columns as the key.
            tgt_uk_cols = tgt_all_cols
            src_uk_cols = src_all_cols
        elif not tgt_uk_cols or not src_uk_cols:
            return {"success": False, "error": "一个表有主键而另一个表没有", "statement": None}

        # The two key lists must have the same length.
        if len(tgt_uk_cols) != len(src_uk_cols):
            return {"success": False, "error": "两个表的主键数量不一致", "statement": None}

        # Compatibility: every target key column must exist in the source table.
        for pk_a in tgt_uk_cols:
            if pk_a not in src_all_cols:
                return {"success": False, "error": f"目标表的主键 '{pk_a}' 不在源表中", "statement": None}

        # Build the (identity) column mapping; every target column must exist in the source.
        column_mapping = {}
        for col_a in tgt_all_cols:
            if col_a in src_all_cols:
                column_mapping[col_a] = col_a
            else:
                return {"success": False, "error": f"目标表的列 '{col_a}' 不在源表中", "statement": None}

        # Validate the key column pairing.
        for pk_a, pk_b in zip(tgt_uk_cols, src_uk_cols):
            if column_mapping.get(pk_a) != pk_b:
                return {"success": False, "error": f"主键映射不匹配 - 目标表 '{pk_a}' 映射到源表 '{pk_b}'", "statement": None}

        # Assemble the INSERT statement.
        insert_parts = []

        # INSERT header; list target columns explicitly only when the column sets differ.
        columns_str = "(" + ", ".join(tgt_all_cols) + ")" if tgt_all_cols != src_all_cols else ''
        insert_parts.append(f"INSERT INTO {tgt_table} {columns_str}")

        # SELECT part.
        insert_parts.append("SELECT")

        # Source column list; use * when both tables share the same columns.
        if tgt_all_cols == src_all_cols:
            insert_parts.append("  *")
        else:
            source_columns = [column_mapping[col_a] for col_a in tgt_all_cols]
            source_columns_str = ", ".join(source_columns)
            insert_parts.append(f"{source_columns_str}")

        # FROM part; append the DB link when the source table is remote.
        if dblink:
            full_src_table = f"{src_table}@{dblink}"
            insert_parts.append(f"FROM {full_src_table}")
        else:
            insert_parts.append(f"FROM {src_table}")

        # Optional WHERE condition.
        if src_filter:
            insert_parts.append(f"WHERE {src_filter}")

        # Terminate the statement.
        insert_parts.append(";")

        # Combine the full statement.
        statement = " ".join(insert_parts)
        return {"success": True, "error": None, "statement": statement}


# Merge strategy via database link.
# NOTE(review): sync() is an unimplemented stub — it delegates to the abstract
# base method (whose body is ``pass``), so selecting this strategy silently
# performs no synchronization. Confirm whether this is intentional.
class MergeViaDBLink(SyncStrategy):
    def __init__(self, seq_no, cur_date, basic_cfg, data_cfg, src_cols, src_uk_cols, tgt_cols, tgt_uk_cols):
        super().__init__(seq_no, cur_date, basic_cfg, data_cfg)
        self.dblink = data_cfg.get(Constant.DB_LINK)  # DB link name used to reach the source table
        self.src_cols = src_cols
        self.src_uk_cols = src_uk_cols
        self.tgt_cols = tgt_cols
        self.tgt_uk_cols = tgt_uk_cols

    def sync(self):
        # Delegates to the abstract no-op base implementation (returns None).
        return super().sync()


# Replace strategy implemented in this program (row streaming).
class ReplaceViaProgram(SyncStrategy):
    """Replaces the target table content by streaming rows through the program:
    truncate the target, then bulk-insert rows fetched from the source in chunks.
    """

    def __init__(self, seq_no, cur_date, basic_cfg, data_cfg, src_cols, src_uk_cols, tgt_cols, tgt_uk_cols):
        super().__init__(seq_no, cur_date, basic_cfg, data_cfg)
        self.src_cols = src_cols
        self.src_uk_cols = src_uk_cols
        self.tgt_cols = tgt_cols
        self.tgt_uk_cols = tgt_uk_cols
        self.source_db = DatabaseFactory.create_database(basic_cfg[Constant.SRC_DB_TYPE], basic_cfg[Constant.SRC_DB_USER],
                                                         basic_cfg[Constant.SRC_DB_PASSWORD], basic_cfg[Constant.SRC_DB_ADDR],
                                                         basic_cfg[Constant.SRC_DB_CONN_STR])
        self.source_db.connect()

    def __del__(self):
        # Base-class destroy() already disconnects both source and target
        # (the redundant destroy() override has been removed).
        self.destroy()

    def _convert_rows(self, cols_name, rows):
        """Sanitize configured columns by round-tripping through GBK.

        Characters not representable in GBK are substituted (errors='replace');
        non-string values pass through unchanged.
        """
        converted_rows = []
        for row in rows:
            converted_row = []
            for i, value in enumerate(row):
                if cols_name[i].upper() in self.conv_columns:
                    try:
                        converted_row.append(value.encode('gbk', errors='replace').decode('gbk'))
                    except AttributeError:
                        # Non-string value (number, None, ...): keep as-is.
                        converted_row.append(value)
                else:
                    converted_row.append(value)
            converted_rows.append(converted_row)
        return converted_rows

    def sync(self):
        """Copy all (filtered) source rows into the target table.

        Truncates the target first, then inserts in chunks, committing after
        each chunk. Cursors are closed even when an error occurs.
        """
        where_clause = f"where {self.src_filter}" if self.src_filter else ""
        query_sql = f"select * from {self.src_table} {where_clause}"
        chunk_size = 20000  # rows fetched from the source per round trip

        source_cursor = self.source_db.connection.cursor()
        target_cursor = None
        try:
            source_cursor.execute(query_sql)

            cols_name = [row[0] for row in source_cursor.description]
            placeholders = ', '.join([':' + str(i + 1) for i in range(len(cols_name))])
            insert_sql = f"insert into {self.tgt_table} ({', '.join(cols_name)}) values ({placeholders})"
            logging.info("Insert SQL: %s", insert_sql)

            target_cursor = self.target_db.connection.cursor()
            # Full replacement: clear the target table before loading.
            target_cursor.execute(f"truncate table {self.tgt_table}")

            while True:
                rows = source_cursor.fetchmany(chunk_size)
                if not rows:
                    break
                target_cursor.executemany(insert_sql, self._convert_rows(cols_name, rows))
                target_cursor.connection.commit()
                logging.info("Inserted %d rows", len(rows))

            logging.info("Data synchronization completed successfully.")
        finally:
            # Bug fix: the original leaked both cursors when any step raised.
            source_cursor.close()
            if target_cursor is not None:
                target_cursor.close()
        return

# Merge strategy implemented in this program (row streaming upsert).
class MergeViaProgram(SyncStrategy):
    """Merges (upserts) source rows into the target table: streams rows in
    chunks and executes an Oracle MERGE keyed on the target's unique-key
    columns, committing after each chunk.
    """

    def __init__(self, seq_no, cur_date, basic_cfg, data_cfg, src_cols, src_uk_cols, tgt_cols, tgt_uk_cols):
        super().__init__(seq_no, cur_date, basic_cfg, data_cfg)
        self.src_cols = src_cols
        self.src_uk_cols = src_uk_cols
        self.tgt_cols = tgt_cols
        self.tgt_uk_cols = tgt_uk_cols
        self.source_db = DatabaseFactory.create_database(basic_cfg[Constant.SRC_DB_TYPE], basic_cfg[Constant.SRC_DB_USER],
                                                         basic_cfg[Constant.SRC_DB_PASSWORD], basic_cfg[Constant.SRC_DB_ADDR],
                                                         basic_cfg[Constant.SRC_DB_CONN_STR])
        self.source_db.connect()

    def _convert_rows(self, cols_name, rows):
        """Sanitize configured columns by round-tripping through GBK.

        Characters not representable in GBK are substituted (errors='replace');
        non-string values pass through unchanged.
        """
        converted_rows = []
        for row in rows:
            converted_row = []
            for i, value in enumerate(row):
                if cols_name[i].upper() in self.conv_columns:
                    try:
                        converted_row.append(value.encode('gbk', errors='replace').decode('gbk'))
                    except AttributeError:
                        # Non-string value (number, None, ...): keep as-is.
                        converted_row.append(value)
                else:
                    converted_row.append(value)
            converted_rows.append(converted_row)
        return converted_rows

    def sync(self):
        """Upsert all (filtered) source rows into the target table."""
        where_clause = f"where {self.src_filter}" if self.src_filter else ""
        query_sql = f"select * from {self.src_table} {where_clause}"
        chunk_size = 20000  # rows fetched from the source per round trip

        source_cursor = self.source_db.connection.cursor()
        target_cursor = None
        try:
            source_cursor.execute(query_sql)
            # NOTE(review): many drivers report -1 for SELECT before fetching — verify.
            logging.info("Total rows: %s", source_cursor.rowcount)
            cols_name = [row[0] for row in source_cursor.description]

            # Bug fix: the original referenced an undefined name ``key_columns``
            # (NameError at runtime). The MERGE is keyed on the target table's
            # unique-key columns. Key columns are excluded from the UPDATE SET
            # list because Oracle forbids updating columns referenced in the
            # ON clause (ORA-38104).
            key_columns = self.tgt_uk_cols
            non_key_columns = [col for col in cols_name if col not in key_columns]

            merge_sql = f"""
merge into {self.tgt_table} target 
using (select {', '.join([f':{i+1} AS {col}' for i, col in enumerate(cols_name)])} from dual) source
on({' and '.join([f'target.{col} = source.{col}' for col in key_columns])})
when matched then 
update set {', '.join([f'target.{col} = source.{col}' for col in non_key_columns])}       
when not matched then
insert ({', '.join(cols_name)}) values ({', '.join([f'source.{col}' for col in cols_name])})"""

            logging.info("Merge SQL: %s", merge_sql)

            target_cursor = self.target_db.connection.cursor()

            while True:
                rows = source_cursor.fetchmany(chunk_size)
                if not rows:
                    break
                target_cursor.executemany(merge_sql, self._convert_rows(cols_name, rows))
                target_cursor.connection.commit()
                logging.info("Merged %d rows", len(rows))

            logging.info("Data synchronization completed successfully.")
        finally:
            # Bug fix: the original leaked both cursors when any step raised.
            source_cursor.close()
            if target_cursor is not None:
                target_cursor.close()
        return

    def __del__(self):
        # Base-class destroy() already disconnects both source and target
        # (the redundant destroy() override has been removed).
        self.destroy()

# Delete strategy.
class DeleteData(SyncStrategy):
    """Deletes data from the target table; uses TRUNCATE when no filter is set."""

    def __init__(self, seq_no, cur_date, basic_cfg, data_cfg):
        # Bug fix: the original signature was (self, basic_cfg, data_cfg) and
        # forwarded only two arguments to SyncStrategy.__init__, which requires
        # four — a guaranteed TypeError. The call site in sync_task also passes
        # all four arguments.
        super().__init__(seq_no, cur_date, basic_cfg, data_cfg)

    def sync(self):
        """Delete rows matching tgt_filter, or truncate when there is no filter."""
        where_clause = f"where {self.tgt_filter}" if self.tgt_filter else None
        sql = f'truncate table {self.tgt_table}' if where_clause is None else f"delete from {self.tgt_table} {where_clause}"

        logging.info("SQL: %s", sql)

        self.target_db.execute_sql(sql)

# Custom synchronization strategy.
class SyncViaCustom(SyncStrategy):
    """Runs a user-provided custom sync rule (stored as ``self.custom_statement``)."""

    def __init__(self, seq_no, cur_date, basic_cfg, data_cfg):
        # Bug fix: the original signature was (self, basic_cfg, data_cfg) and
        # forwarded only two arguments to SyncStrategy.__init__, which requires
        # four — a guaranteed TypeError. The call site in sync_task also passes
        # all four arguments.
        super().__init__(seq_no, cur_date, basic_cfg, data_cfg)

    def sync(self):
        # NOTE(review): still a stub — delegates to the abstract no-op base
        # implementation; the custom statement is never executed here.
        return super().sync()


# Data synchronization task (runs inside a worker process).
def sync_task(sync_cfg, error_event, error_queue):
    """Select and run the sync strategy for a single table configuration.

    On failure, pushes (src_table, tgt_table, message, traceback) onto
    ``error_queue``, sets ``error_event`` so the parent can abort the run,
    and exits the worker process with status 1.
    """
    seq_no = sync_cfg['seq_no']
    cur_date = sync_cfg['cur_date']
    basic_cfg = sync_cfg['basic_cfg']
    data_cfg = sync_cfg['data_cfg']
    src_cols = sync_cfg['src_cols']
    src_uk_cols = sync_cfg['src_uk_cols']
    tgt_cols = sync_cfg['tgt_cols']
    tgt_uk_cols = sync_cfg['tgt_uk_cols']

    # Bug fix: pre-bind so the finally block cannot raise UnboundLocalError
    # (masking the real error) when strategy construction itself fails.
    sync_strategy = None
    try:
        # Select the synchronization strategy.
        # NOTE(review): SyncStrategy.__init__ reads CUSTOM_SYNC_RULE with
        # .get(..., ''), so an empty-string value here would still select
        # SyncViaCustom — confirm the config stores None when no rule is set.
        if data_cfg[Constant.CUSTOM_SYNC_RULE] is not None:
            sync_strategy = SyncViaCustom(seq_no, cur_date, basic_cfg, data_cfg)
        elif data_cfg[Constant.SYNC_STRATEGY] == Constant.REPLACE and data_cfg[Constant.DB_LINK] == '-':
            sync_strategy = ReplaceViaProgram(seq_no, cur_date, basic_cfg, data_cfg, src_cols, src_uk_cols, tgt_cols, tgt_uk_cols)
        elif data_cfg[Constant.SYNC_STRATEGY] == Constant.MERGE and data_cfg[Constant.DB_LINK] == '-':
            sync_strategy = MergeViaProgram(seq_no, cur_date, basic_cfg, data_cfg, src_cols, src_uk_cols, tgt_cols, tgt_uk_cols)
        elif data_cfg[Constant.SYNC_STRATEGY] == Constant.REPLACE:
            sync_strategy = ReplaceViaDBLink(seq_no, cur_date, basic_cfg, data_cfg, src_cols, src_uk_cols, tgt_cols, tgt_uk_cols)
        elif data_cfg[Constant.SYNC_STRATEGY] == Constant.MERGE:
            sync_strategy = MergeViaDBLink(seq_no, cur_date, basic_cfg, data_cfg, src_cols, src_uk_cols, tgt_cols, tgt_uk_cols)
        elif data_cfg[Constant.SYNC_STRATEGY] == Constant.DELETE:
            sync_strategy = DeleteData(seq_no, cur_date, basic_cfg, data_cfg)
        else:
            raise ValueError(f"Invalid sync strategy: {data_cfg[Constant.SYNC_STRATEGY]}")

        sync_strategy.sync()

    except Exception as e:
        error_queue.put((data_cfg[Constant.SRC_TABLE], data_cfg[Constant.TGT_TABLE], str(e), traceback.format_exc()))
        error_event.set()  # signal the failure to the parent process
        sys.exit(1)  # terminate this worker
    finally:
        # Close connections (only if a strategy was actually constructed).
        if sync_strategy is not None:
            sync_strategy.destroy()


# Multi-process synchronization driver.
def multi_process_sync(tasks):
    """Run one worker process per task and wait for completion.

    Aborts the whole run (terminating all remaining workers) as soon as any
    worker signals a failure, prints the collected error reports, and exits
    the program with status 1 if any failure occurred.
    """
    # Shared state between the parent and the workers.
    manager = multiprocessing.Manager()
    error_event = manager.Event()
    error_queue = manager.Queue()

    processes = []
    for task in tasks:
        # Each worker gets the shared error_event and error_queue.
        process = multiprocessing.Process(target=sync_task, args=(task, error_event, error_queue))
        processes.append(process)
        process.start()

    # Wait until every worker finishes or any worker reports a failure.
    while True:
        if error_event.is_set():
            print("Error detected in one or more processes. Terminating all processes.")
            for process in processes:
                if process.is_alive():
                    process.terminate()  # force-stop the remaining workers
            break

        if all(not process.is_alive() for process in processes):
            break

        time.sleep(0.1)  # avoid busy-waiting

    # Bug fix: join every child (including terminated ones) so no zombie
    # processes are left behind.
    for process in processes:
        process.join()

    # Print the collected error reports.
    while not error_queue.empty():
        src_table, tgt_table, error_message, traceback_info = error_queue.get()
        print(f"Error in sync from {src_table} to {tgt_table}: {error_message}")
        print(traceback_info)

    # Propagate the failure to the caller's exit status.
    if error_event.is_set():
        sys.exit(1)


def main():
    """CLI entry point: parse arguments, handle sub-commands, build the task
    list from the configuration and launch the multi-process sync run."""
    # Main argument parser.
    parser = argparse.ArgumentParser(
        prog="dataSync",
        description="A tool for data synchronization between databases",
        epilog="Thank you for using this program! If you have any questions, please contact support@example.com.",
        formatter_class=argparse.RawTextHelpFormatter,  # keep line breaks in the help text
        add_help=False)
    parser.add_argument('-c', "--config", help="Specify the path of the configuration file", metavar="<FILE>", default='config.xlsx')
    parser.add_argument("-v", "--version", action="version", version="%(prog)s v1.0.0")
    parser.add_argument("-h", "--help", action="store_true", help="Display help information")

    # Sub-command parsers.
    subparsers = parser.add_subparsers(title="sub commands", dest="command", required=False)

    # Sub-command 1: password encryption/decryption.
    parser_pwd = subparsers.add_parser("password", add_help=False)
    parser_pwd.add_argument("text", help="Password string")
    parser_pwd.add_argument("-d", "--decrypt", action="store_true", help="Decrypt the password")

    # Parse arguments (unknown extras are tolerated and ignored).
    args, _ = parser.parse_known_args()

    # Help handling: show the selected sub-command's help, or the main help.
    if args.help:
        if args.command == "password":
            parser_pwd.print_help()
        else:
            parser.print_help()
        return

    # Sub-command handling.
    if args.command == "password":
        password = Password(args.text, args.decrypt)
        print(password if args.decrypt else password.encrypt())
        # Bug fix: the password sub-command must not fall through into a full
        # synchronization run.
        return

    conf = Config(args.config)

    # Fetch every table's column and unique-key metadata once up front; the
    # worker processes then use this cached data instead of querying themselves.
    # NOTE(review): DatabaseFactory is used as a context manager here but via
    # create_database() elsewhere — confirm both entry points are supported.
    with DatabaseFactory(conf.base_config[Constant.SRC_DB_TYPE], conf.base_config[Constant.SRC_DB_USER], conf.base_config[Constant.SRC_DB_PASSWORD],
                         conf.base_config[Constant.SRC_DB_ADDR], conf.base_config[Constant.SRC_DB_CONN_STR]) as source_db:
        src_cols_mgr = ColumnsMgr(source_db)
        # Bug fix: ``datetime`` was never imported (NameError here); the import
        # is added at the top of the file.
        cur_date = datetime.now().strftime('%Y%m%d')

    with DatabaseFactory(conf.base_config[Constant.TGT_DB_TYPE], conf.base_config[Constant.TGT_DB_USER], conf.base_config[Constant.TGT_DB_PASSWORD],
                         conf.base_config[Constant.TGT_DB_ADDR], conf.base_config[Constant.TGT_DB_CONN_STR]) as target_db:
        tgt_cols_mgr = ColumnsMgr(target_db)

    # Build the task list from the configured data group.
    # NOTE(review): the group name 'MEMCLEAR90_1' is hard-coded — consider
    # exposing it as a CLI or config option.
    tasks = []
    for seq_no, group_cfg in enumerate(conf.data_config_groups['MEMCLEAR90_1']):
        if group_cfg.get(Constant.SYNC_SWITCH) == Constant.DISABLE:
            continue

        tgt_owner, tgt_table = group_cfg[Constant.TGT_TABLE].split('.')
        # A source table without an owner prefix defaults to the target table.
        src_owner, src_table = group_cfg[Constant.SRC_TABLE].split('.') if '.' in group_cfg.get(Constant.SRC_TABLE) else (tgt_owner, tgt_table)
        tasks.append({
            "seq_no": seq_no,
            "cur_date": cur_date,
            "basic_cfg": conf.base_config,
            "data_cfg": group_cfg,
            "src_cols": src_cols_mgr.get_all_columns(src_owner, src_table),
            "src_uk_cols": src_cols_mgr.get_uk_columns(src_owner, src_table),
            "tgt_cols": tgt_cols_mgr.get_all_columns(tgt_owner, tgt_table),
            "tgt_uk_cols": tgt_cols_mgr.get_uk_columns(tgt_owner, tgt_table),
        })

    # Run the synchronization tasks in parallel.
    multi_process_sync(tasks)


# Script entry point.
if __name__ == "__main__":
    main()
