import clickhouse_driver
import argparse
import datetime
import time
from file_processor import FileProcessor  # file-processor class implemented earlier (imported but currently unused in this module)
from calc_time_range import TimeRangeCalculator
from custom_logger import PrefixDateLogger

# Module-level logger used by the whole script; PrefixDateLogger presumably
# prefixes messages with the given tag and a date — confirm in custom_logger.
logger = PrefixDateLogger("compare_filelog_chdb")

class CompareLogDb:
    """Compare per-hour log-file line counts (db_cfx.t_mysql_file_line_counts)
    against the rows actually landed in the ClickHouse standard tables, and
    record the differences in db_cfx.t_monitor_log_collect_compare.

    The comparison runs in four passes (active events, passive_active,
    widget_show, all remaining events); each pass first counts how many
    (ip, app_id, event_id, date, hour) combinations still lack a comparison
    row (ANTI JOIN) and then inserts the missing comparison rows.
    """

    def __init__(self, host='172.29.151.181', port=9000, user='default',
                 password='Keepgo123@cys', database='db_cfx', target_dir='/home/cys/www/data_vector_supplement'):
        """
        Initialize the ClickHouse comparison processor.
        :param host: ClickHouse host address
        :param port: ClickHouse native-protocol port
        :param user: ClickHouse user name
        :param password: ClickHouse password
        :param database: ClickHouse database name
        :param target_dir: destination directory for file copies
        """
        self.host = host
        self.port = port
        self.user = user
        self.password = password
        self.database = database
        self.target_dir = target_dir
        self.connection = None
        # Tracks already-processed (ip, add_id) pairs to avoid duplicate work.
        self.processed_pairs = set()

        # Time budgets in seconds: when a count query (resp. an insert) takes
        # longer than this, the server is considered too busy and the current
        # task is aborted instead of piling on more load.
        self.max_query_time = 1
        self.max_insert_time = 10

    def calculate_date_hour_ranges(self, hours):
        """
        Compute the calendar dates covered by the last *hours* hours, together
        with the start/end hour inside each date.

        :param hours: number of hours to look back (integer, 1-72)
        :return: list of dicts with keys "date" (YYYY-MM-DD string),
                 "start_hour" and "end_hour" (ints, inclusive)
        :raises ValueError: if *hours* is not an int in [1, 72]
        """
        if not isinstance(hours, int) or hours < 1 or hours > 72:
            raise ValueError("小时数必须是1到72之间的整数")

        # Window boundaries: now, and *hours* hours before now.
        end_time = datetime.datetime.now()
        start_time = end_time - datetime.timedelta(hours=hours)

        # Iterate over calendar dates (not datetimes) so the final date is
        # never skipped.  The previous implementation stepped start_time
        # forward in 24 h increments, which missed the end date whenever
        # start_time's time-of-day was later than end_time's.
        sorted_dates = []
        current_date = start_time.date()
        while current_date <= end_time.date():
            sorted_dates.append(current_date)
            current_date += datetime.timedelta(days=1)

        result = []
        for date in sorted_dates:
            # Full extent of this calendar date.
            date_start = datetime.datetime.combine(date, datetime.time.min)
            date_end = datetime.datetime.combine(date, datetime.time.max)

            # Clamp the window to this date: start is the later of the window
            # start and midnight; end is the earlier of the window end and
            # 23:59:59.999999.
            start_hour = max(start_time, date_start).hour
            end_hour = min(end_time, date_end).hour

            result.append({
                "date": date.strftime("%Y-%m-%d"),
                "start_hour": start_hour,
                "end_hour": end_hour
            })

        return result

    def connect(self):
        """Open a connection to ClickHouse; return True on success."""
        logger.info(self.host)
        try:
            self.connection = clickhouse_driver.connect(
                host=self.host,
                port=self.port,
                user=self.user,
                password=self.password,
                database=self.database
            )
            logger.info("成功连接到ClickHouse")
            return True
        except Exception as e:
            logger.info(f"连接ClickHouse失败: {str(e)}")
            self.connection = None
            return False

    def disconnect(self):
        """Close the ClickHouse connection if one is open."""
        if self.connection:
            self.connection.close()
            self.connection = None
            logger.info("关闭ClickHouse连接")

    def query_missing_compare_data_active(self, date, hour_start, hour_end):
        """
        Count "active" event rows in t_mysql_file_line_counts that do not yet
        have a matching row in t_monitor_log_collect_compare.
        :param date: date string such as '2023-10-01'
        :param hour_start: first hour of the window (inclusive), e.g. 8
        :param hour_end: last hour of the window (inclusive), e.g. 10
        :return: number of missing rows, or -1 when the query exceeded
                 max_query_time (server considered busy)
        :raises ValueError: if connect() was not called first
        """
        if not self.connection:
            raise ValueError("ClickHouse连接未初始化，请先调用connect()方法")
        start_time = time.time()
        # NOTE(review): date/hour are interpolated into the SQL text; callers
        # must pass trusted values (here they come from our own CLI / time
        # calculation, never from external input).
        query = f"""
            select count()
            from db_cfx.t_mysql_file_line_counts as a 
            ANTI left join db_cfx.t_monitor_log_collect_compare as d
            on a.ip=d.ip and a.app_id =d.app_id and a.event_id=d.event_id and toDate(a.date)=d.date and a.hour=d.hour
            where a.date='{date}' and a.hour >= {hour_start} and a.hour <= {hour_end}
            and a.event_id in ('systemui_start','widget_text_click_detail','negative_systemui_start','launcher_start')
        """
        logger.info("query_missing_compare_data_active:" + query)
        try:
            cursor = self.connection.cursor()
            cursor.execute(query)
            result = cursor.fetchall()
            cnt = result[0][0]
            end_time = time.time()
            # A slow count query signals an overloaded server: report -1 so
            # the caller aborts instead of running the heavy insert.
            if end_time - start_time > self.max_query_time:
                cnt = -1
            logger.info(f"查询active数据结果 {result} 查询到 {cnt} 条符合条件的数据 cost:{end_time - start_time:.3f} s")
            return cnt
        except Exception as e:
            logger.info(f"查询query_missing_compare_data_active失败: {str(e)}")
            raise

    def insert_missing_compare_data_active(self, date, hour_start, hour_end):
        """
        Insert comparison rows for "active" events into
        t_monitor_log_collect_compare, joining expected line counts against
        actual counts from t_log_base_data_standard_active.
        :return: False when the insert exceeded max_insert_time, else True
        :raises ValueError: if connect() was not called first
        """
        if not self.connection:
            raise ValueError("ClickHouse连接未初始化，请先调用connect()方法")
        start_time = time.time()
        query = f"""
            insert into db_cfx.t_monitor_log_collect_compare
            with
                ch_count as (
                select log_source_ip,base_app_id,base_k,base_create_date ,base_create_hour as hour,count() as cnt from db_cfx.t_log_base_data_standard_active
                    where base_create_date='{date}' and base_create_hour>={hour_start} and base_create_hour<={hour_end}
                    group by log_source_ip,base_app_id,base_k,base_create_date ,base_create_hour
                ),
                mysql_count as (
                select a.ip,a.app_id,a.event_id,a.date,a.hour,a.line_count as log_cnt
                    from db_cfx.t_mysql_file_line_counts as a 
                    ANTI left join db_cfx.t_monitor_log_collect_compare as d
                    on a.ip=d.ip and a.app_id =d.app_id and a.event_id=d.event_id and toDate(a.date)=d.date and a.hour=d.hour
                    where a.date='{date}' and a.hour >= {hour_start} and a.hour <= {hour_end} 
                    and a.event_id in ('systemui_start','widget_text_click_detail','negative_systemui_start','launcher_start')
                )
            select a.*,ch.cnt as ch_cnt,(a.log_cnt -ch.cnt) as diff,ch.cnt/a.log_cnt as completeness_ratio,'t_log_base_data_standard_active' as table_name
            from mysql_count as a
            left join ch_count as ch
            on ch.log_source_ip=a.ip and ch.base_app_id=a.app_id and ch.base_k = a.event_id and ch.base_create_date=toDate(a.date) and ch.hour=a.hour

        """
        logger.info("insert_missing_compare_data_active:" + query)
        try:
            cursor = self.connection.cursor()
            cursor.execute(query)
            end_time = time.time()

            logger.info(f"active插入完成 cost:{end_time - start_time:.3f} s")
            # A slow insert also signals an overloaded server: tell the caller
            # to stop the current task.
            if end_time - start_time > self.max_insert_time:
                return False
        except Exception as e:
            logger.info(f"insert_missing_compare_data_active失败: {str(e)}")
            raise
        return True

    def query_missing_compare_data_passive_active(self, date, hour_start, hour_end):
        """
        Count 'server_active_event' rows in t_mysql_file_line_counts that do
        not yet have a matching row in t_monitor_log_collect_compare.
        :param date: date string such as '2023-10-01'
        :param hour_start: first hour of the window (inclusive)
        :param hour_end: last hour of the window (inclusive)
        :return: number of missing rows, or -1 when the query exceeded
                 max_query_time
        :raises ValueError: if connect() was not called first
        """
        if not self.connection:
            raise ValueError("ClickHouse连接未初始化，请先调用connect()方法")
        start_time = time.time()
        query = f"""
            select count()
            from db_cfx.t_mysql_file_line_counts as a 
            ANTI left join db_cfx.t_monitor_log_collect_compare as d
            on a.ip=d.ip and a.app_id =d.app_id and a.event_id=d.event_id and toDate(a.date)=d.date and a.hour=d.hour
            where a.date='{date}' and a.hour >= {hour_start} and a.hour <= {hour_end}
            and a.event_id ='server_active_event'
        """

        try:
            cursor = self.connection.cursor()
            cursor.execute(query)
            result = cursor.fetchall()
            cnt = result[0][0]
            end_time = time.time()
            if end_time - start_time > self.max_query_time:
                cnt = -1
            logger.info(f"查询passive_active数据，结果 {result} 查询到 {cnt} 条符合条件的数据 cost:{end_time - start_time:.3f} s")
            return cnt
        except Exception as e:
            logger.info(f"查询query_missing_compare_data_passive_active失败: {str(e)}")
            raise

    def insert_missing_compare_data_passive_active(self, date, hour_start, hour_end):
        """
        Insert comparison rows for 'server_active_event' into
        t_monitor_log_collect_compare, joining against
        t_log_base_data_standard_passive_active.
        :return: False when the insert exceeded max_insert_time, else True
        :raises ValueError: if connect() was not called first
        """
        if not self.connection:
            raise ValueError("ClickHouse连接未初始化，请先调用connect()方法")
        start_time = time.time()
        query = f"""
            insert into db_cfx.t_monitor_log_collect_compare
            with
                ch_count as (
                select log_source_ip,base_app_id,base_k,base_create_date ,base_create_hour as hour,count() as cnt from db_cfx.t_log_base_data_standard_passive_active
                    where base_create_date='{date}' and base_create_hour>={hour_start} and base_create_hour<={hour_end}
                    group by log_source_ip,base_app_id,base_k,base_create_date ,base_create_hour
                ),
                mysql_count as (
                select a.ip,a.app_id,a.event_id,a.date,a.hour,a.line_count as log_cnt
                    from db_cfx.t_mysql_file_line_counts as a 
                    ANTI left join db_cfx.t_monitor_log_collect_compare as d
                    on a.ip=d.ip and a.app_id =d.app_id and a.event_id=d.event_id and toDate(a.date)=d.date and a.hour=d.hour
                    where a.date='{date}' and a.hour >= {hour_start} and a.hour <= {hour_end}
                    and a.event_id ='server_active_event'
                )
            select a.*,ch.cnt as ch_cnt,(a.log_cnt -ch.cnt) as diff,ch.cnt/a.log_cnt as completeness_ratio,'t_log_base_data_standard_passive_active' as table_name
            from mysql_count as a
            left join ch_count as ch
            on ch.log_source_ip=a.ip and ch.base_app_id=a.app_id and ch.base_k = a.event_id and ch.base_create_date=toDate(a.date) and ch.hour=a.hour

        """
        # Fixed copy-paste bug: this used to log under the "active" label.
        logger.info("insert_missing_compare_data_passive_active:" + query)
        try:
            cursor = self.connection.cursor()
            cursor.execute(query)
            end_time = time.time()
            logger.info(f"passive_active插入完成 cost:{end_time - start_time:.3f} s")
            if end_time - start_time > self.max_insert_time:
                return False
        except Exception as e:
            logger.info(f"insert_missing_compare_data_passive_active失败: {str(e)}")
            raise
        return True

    def query_missing_compare_data_widget_show(self, date, hour_start, hour_end):
        """
        Count 'widget_show' rows in t_mysql_file_line_counts that do not yet
        have a matching row in t_monitor_log_collect_compare.
        :param date: date string such as '2023-10-01'
        :param hour_start: first hour of the window (inclusive)
        :param hour_end: last hour of the window (inclusive)
        :return: number of missing rows, or -1 when the query exceeded
                 max_query_time
        :raises ValueError: if connect() was not called first
        """
        if not self.connection:
            raise ValueError("ClickHouse连接未初始化，请先调用connect()方法")
        start_time = time.time()
        query = f"""
            select count()
            from db_cfx.t_mysql_file_line_counts as a 
            ANTI left join db_cfx.t_monitor_log_collect_compare as d
            on a.ip=d.ip and a.app_id =d.app_id and a.event_id=d.event_id and toDate(a.date)=d.date and a.hour=d.hour
            where a.date='{date}' and a.hour >= {hour_start} and a.hour <= {hour_end}
            and a.event_id ='widget_show'
        """

        try:
            cursor = self.connection.cursor()
            cursor.execute(query)
            result = cursor.fetchall()
            cnt = result[0][0]
            end_time = time.time()
            if end_time - start_time > self.max_query_time:
                cnt = -1
            logger.info(f"查询widget_show数据，结果 {result} 查询到 {cnt} 条符合条件的数据 cost:{end_time - start_time:.3f} s")
            return cnt
        except Exception as e:
            logger.info(f"查询query_missing_compare_data_widget_show失败: {str(e)}")
            raise

    def insert_missing_compare_data_widget_show(self, date, hour_start, hour_end):
        """
        Insert comparison rows for 'widget_show' into
        t_monitor_log_collect_compare, joining against
        t_log_base_data_standard_widget_show.
        :return: False when the insert exceeded max_insert_time, else True
        :raises ValueError: if connect() was not called first
        """
        if not self.connection:
            raise ValueError("ClickHouse连接未初始化，请先调用connect()方法")
        start_time = time.time()
        query = f"""
            insert into db_cfx.t_monitor_log_collect_compare
            with
                ch_count as (
                select log_source_ip,base_app_id,base_k,base_create_date ,base_create_hour as hour,count() as cnt from db_cfx.t_log_base_data_standard_widget_show
                    where base_create_date='{date}' and base_create_hour>={hour_start} and base_create_hour<={hour_end}
                    group by log_source_ip,base_app_id,base_k,base_create_date ,base_create_hour
                ),
                mysql_count as (
                select a.ip,a.app_id,a.event_id,a.date,a.hour,a.line_count as log_cnt
                    from db_cfx.t_mysql_file_line_counts as a 
                    ANTI left join db_cfx.t_monitor_log_collect_compare as d
                    on a.ip=d.ip and a.app_id =d.app_id and a.event_id=d.event_id and toDate(a.date)=d.date and a.hour=d.hour
                    where a.date='{date}' and a.hour >= {hour_start} and a.hour <= {hour_end}
                    and a.event_id ='widget_show'
                )
            select a.*,ch.cnt as ch_cnt,(a.log_cnt -ch.cnt) as diff,ch.cnt/a.log_cnt as completeness_ratio,'t_log_base_data_standard_widget_show' as table_name
            from mysql_count as a
            left join ch_count as ch
            on ch.log_source_ip=a.ip and ch.base_app_id=a.app_id and ch.base_k = a.event_id and ch.base_create_date=toDate(a.date) and ch.hour=a.hour

        """
        # Fixed copy-paste bug: this used to log under the "active" label.
        logger.info("insert_missing_compare_data_widget_show:" + query)
        try:
            cursor = self.connection.cursor()
            cursor.execute(query)
            end_time = time.time()
            logger.info(f"widget_show插入完成 cost:{end_time - start_time:.3f} s")
            if end_time - start_time > self.max_insert_time:
                return False
        except Exception as e:
            logger.info(f"insert_missing_compare_data_widget_show失败: {str(e)}")
            raise
        return True

    def query_missing_compare_data(self, date, hour_start, hour_end):
        """
        Count rows of any event type in t_mysql_file_line_counts that do not
        yet have a matching row in t_monitor_log_collect_compare.
        :param date: date string such as '2023-10-01'
        :param hour_start: first hour of the window (inclusive)
        :param hour_end: last hour of the window (inclusive)
        :return: number of missing rows, or -1 when the query exceeded
                 max_query_time
        :raises ValueError: if connect() was not called first
        """
        if not self.connection:
            raise ValueError("ClickHouse连接未初始化，请先调用connect()方法")
        start_time = time.time()
        query = f"""
            select count()
            from db_cfx.t_mysql_file_line_counts as a 
            ANTI left join db_cfx.t_monitor_log_collect_compare as d
            on a.ip=d.ip and a.app_id =d.app_id and a.event_id=d.event_id and toDate(a.date)=d.date and a.hour=d.hour
            where a.date='{date}' and a.hour >= {hour_start} and a.hour <= {hour_end} 
        """

        try:
            cursor = self.connection.cursor()
            cursor.execute(query)
            result = cursor.fetchall()
            cnt = result[0][0]
            end_time = time.time()
            if end_time - start_time > self.max_query_time:
                cnt = -1
            logger.info(f"查询all_event数据，结果 {result} 查询到 {cnt} 条符合条件的数据 cost:{end_time - start_time:.3f} s")
            return cnt
        except Exception as e:
            logger.info(f"查询query_missing_compare_data_all_event失败: {str(e)}")
            raise

    def insert_missing_compare_data(self, date, hour_start, hour_end):
        """
        Insert comparison rows for all remaining events into
        t_monitor_log_collect_compare, in two passes against
        t_log_base_data_standard:
        1. per-event rows (app_id != event_id), matched on base_k;
        2. merged-event rows (app_id == event_id), matched without base_k.
        :return: False when either insert exceeded max_insert_time, else True
        :raises ValueError: if connect() was not called first
        """
        if not self.connection:
            raise ValueError("ClickHouse连接未初始化，请先调用connect()方法")
        start_time = time.time()
        # Pass 1: events kept in separate files (app_id != event_id).
        query = f"""
            insert into db_cfx.t_monitor_log_collect_compare
            with
                ch_count as (
                select log_source_ip,base_app_id,base_k,base_create_date ,base_create_hour as hour,count() as cnt from db_cfx.t_log_base_data_standard
                    where base_create_date='{date}' and base_create_hour>={hour_start} and base_create_hour<={hour_end}
                    and base_k  in (select distinct k from db_cfx.t_mysql_data_switch_item where file_name ='')
                    group by log_source_ip,base_app_id,base_k,base_create_date ,base_create_hour
                ),
                mysql_count as (
                select a.ip,a.app_id,a.event_id,a.date,a.hour,a.line_count as log_cnt
                    from db_cfx.t_mysql_file_line_counts as a 
                    ANTI left join db_cfx.t_monitor_log_collect_compare as d
                    on a.ip=d.ip and a.app_id =d.app_id and a.event_id=d.event_id and toDate(a.date)=d.date and a.hour=d.hour
                    where a.date='{date}' and a.hour >= {hour_start} and a.hour <= {hour_end} and a.app_id!=a.event_id 
                    and event_id in (select distinct event_id from db_cfx.t_monitor_log_collect_compare where date=today()-1 and table_name ='t_log_base_data_standard')
                )
            select a.*,ch.cnt as ch_cnt,(a.log_cnt -ch.cnt) as diff,ch.cnt/a.log_cnt as completeness_ratio,'t_log_base_data_standard' as table_name
            from mysql_count as a
            left join ch_count as ch
            on ch.log_source_ip=a.ip and ch.base_app_id=a.app_id and ch.base_k = a.event_id and ch.base_create_date=toDate(a.date) and ch.hour=a.hour

        """
        # Fixed copy-paste bug: this used to log under the "active" label.
        logger.info("insert_missing_compare_data:" + query)
        try:
            cursor = self.connection.cursor()
            cursor.execute(query)
            end_time = time.time()
            logger.info(f"all_event插入完成 cost:{end_time - start_time:.3f} s")
            if end_time - start_time > self.max_insert_time:
                return False
        except Exception as e:
            logger.info(f"insert_missing_compare_data_all_event失败: {str(e)}")
            raise
        start_time = time.time()
        # Pass 2: events merged into one file (app_id == event_id), so the
        # ClickHouse counts are grouped without base_k.
        query = f"""
            insert into db_cfx.t_monitor_log_collect_compare
            with
                ch_count as (
                select log_source_ip,base_app_id,base_create_date ,base_create_hour as hour,count() as cnt from db_cfx.t_log_base_data_standard
                    where base_create_date='{date}' and base_create_hour>={hour_start} and base_create_hour<={hour_end}
                    and base_k  in (select distinct k from db_cfx.t_mysql_data_switch_item where file_name !='')
                    group by log_source_ip,base_app_id,base_create_date ,base_create_hour
                ),
                mysql_count as (
                select a.ip,a.app_id,a.event_id,a.date,a.hour,a.line_count as log_cnt
                    from db_cfx.t_mysql_file_line_counts as a 
                    ANTI left join db_cfx.t_monitor_log_collect_compare as d
                    on a.ip=d.ip and a.app_id =d.app_id and a.event_id=d.event_id and toDate(a.date)=d.date and a.hour=d.hour
                    where a.date='{date}' and a.hour >= {hour_start} and a.hour <= {hour_end} and a.app_id=a.event_id 
                )
            select a.*,ch.cnt as ch_cnt,(a.log_cnt -ch.cnt) as diff,ch.cnt/a.log_cnt as completeness_ratio,'t_log_base_data_standard' as table_name
            from mysql_count as a
            left join ch_count as ch
            on ch.log_source_ip=a.ip and ch.base_app_id=a.app_id and  ch.base_create_date=toDate(a.date) and ch.hour=a.hour

        """
        logger.info("insert_missing_compare_data:" + query)
        try:
            cursor = self.connection.cursor()
            cursor.execute(query)
            end_time = time.time()
            logger.info(f"all_event 合并后事件插入完成 cost:{end_time - start_time:.3f} s")
            if end_time - start_time > self.max_insert_time:
                return False
        except Exception as e:
            logger.info(f"insert_missing_compare_data_all_event失败: {str(e)}")
            raise
        return True

    def delete_compare_diff_data(self, date, hour_start, hour_end):
        """
        Delete incomplete comparison rows (completeness_ratio < 0.995) for the
        given window so they will be recomputed by the insert passes.
        :param date: date string such as '2023-10-01'
        :param hour_start: first hour of the window (inclusive)
        :param hour_end: last hour of the window (inclusive)
        :raises ValueError: if connect() was not called first
        """
        if not self.connection:
            raise ValueError("ClickHouse连接未初始化，请先调用connect()方法")

        # ALTER TABLE ... DELETE is an asynchronous mutation in ClickHouse;
        # rows may disappear slightly after this call returns.
        query = f"""
            alter table db_cfx.t_monitor_log_collect_compare
            delete where date='{date}'
            and hour >= {hour_start}
            and hour <= {hour_end}
            and completeness_ratio<0.995
        """

        try:
            cursor = self.connection.cursor()
            cursor.execute(query)
            logger.info(f"删除diff > 0的数据完成")
        except Exception as e:
            # Fixed copy-paste bug: failures were logged under the
            # query_missing_compare_data_all_event label.
            logger.info(f"delete_compare_diff_data失败: {str(e)}")
            raise

    def process_data(self, date, hour_start, hour_end):
        """
        Run the full comparison for one window: purge incomplete rows, then
        process the four event groups in order (active, passive_active,
        widget_show, all remaining events).  Aborts the whole task as soon as
        any query or insert exceeds its time budget.
        :param date: date string such as '2023-10-01'
        :param hour_start: first hour of the window (inclusive)
        :param hour_end: last hour of the window (inclusive)
        """
        if not self.connection:
            logger.info("未连接到ClickHouse，尝试自动连接...")
            if not self.connect():
                logger.info("连接失败，无法继续处理")
                return
        self.delete_compare_diff_data(date, hour_start, hour_end)

        # --- active events ---
        try:
            cnt = self.query_missing_compare_data_active(date, hour_start, hour_end)
            logger.info(f"查询到 {cnt} 条符合条件的数据")
            if cnt >= 0:
                logger.info("查询到符合条件的数据，处理数据")
                if not self.insert_missing_compare_data_active(date, hour_start, hour_end):
                    logger.info("insert_missing_compare_data_active time out , quit task")
                    return
            else:
                logger.info("query_missing_compare_data_active time out , quit task")
                return
        except Exception as e:
            logger.info(f"active数据处理过程中发生错误: {str(e)}")

        # Pause between passes to reduce load on the server.
        time.sleep(10)

        # --- passive_active events ---
        try:
            cnt = self.query_missing_compare_data_passive_active(date, hour_start, hour_end)
            logger.info(f"查询到 {cnt} 条符合条件的数据")
            if cnt >= 0:
                logger.info("查询到符合条件的数据，处理数据")
                if not self.insert_missing_compare_data_passive_active(date, hour_start, hour_end):
                    logger.info("insert_missing_compare_data_passive_active time out , quit task")
                    return
            else:
                logger.info("query_missing_compare_data_passive_active time out , quit task")
                return
        except Exception as e:
            logger.info(f"passive_active数据处理过程中发生错误: {str(e)}")

        time.sleep(10)

        # --- widget_show events ---
        try:
            cnt = self.query_missing_compare_data_widget_show(date, hour_start, hour_end)
            logger.info(f"查询到 {cnt} 条符合条件的数据")
            if cnt >= 0:
                logger.info("查询到符合条件的数据，处理数据")
                if not self.insert_missing_compare_data_widget_show(date, hour_start, hour_end):
                    logger.info("insert_missing_compare_data_widget_show time out , quit task")
                    return
            else:
                logger.info("query_missing_compare_data_widget_show time out , quit task")
                return
        except Exception as e:
            logger.info(f"widget_show数据处理过程中发生错误: {str(e)}")

        # --- all remaining events ---
        try:
            cnt = self.query_missing_compare_data(date, hour_start, hour_end)
            logger.info(f"查询到 {cnt} 条符合条件的数据")
            if cnt >= 0:
                logger.info("查询到符合条件的数据，处理数据")
                if not self.insert_missing_compare_data(date, hour_start, hour_end):
                    logger.info("insert_missing_compare_data time out , quit task")
                    return
            else:
                # Fixed: the original attached this else to the insert check,
                # so a successful insert logged a bogus timeout and a real
                # query timeout was silently ignored.
                logger.info("query_missing_compare_data time out , quit task")
                return
        except Exception as e:
            logger.info(f"all_event数据处理过程中发生错误: {str(e)}")

        time.sleep(10)

    def run(self, date, hour_start, hour_end):
        """
        Full lifecycle for one window: connect -> process -> disconnect.
        :param date: date string such as '2023-10-01'
        :param hour_start: first hour of the window (inclusive)
        :param hour_end: last hour of the window (inclusive)
        """
        try:
            if not self.connect():
                return
            self.process_data(date, hour_start, hour_end)
        finally:
            self.disconnect()

if __name__ == "__main__":
    # CLI entry point: either process an explicit date/hour window, or — when
    # --hour-num is positive — the most recent N hours split per calendar day.
    cli = argparse.ArgumentParser(description='从ClickHouse读取数据并处理文件拷贝')

    # Connection options for ClickHouse.
    cli.add_argument('--ch-host', default='172.29.151.181', help='ClickHouse主机地址')
    cli.add_argument('--ch-port', type=int, default=9000, help='ClickHouse端口')
    cli.add_argument('--ch-user', default='default', help='ClickHouse用户名')
    cli.add_argument('--ch-password', default='Keepgo123@cys', help='ClickHouse密码')
    cli.add_argument('--ch-db', default='db_cfx', help='ClickHouse数据库名')

    # Window selection options.
    cli.add_argument('--date', required=True, help='查询日期，格式如2023-10-01')
    cli.add_argument('--hour-start', type=int, default=0, help='开始小时，如8')
    cli.add_argument('--hour-end', type=int, default=23, help='结束小时，如10')
    cli.add_argument('--hour-num', type=int, default=0, help='处理最近N小时，1-72之间，优先级高于date/hour-start/hour-end')

    # Destination directory for copied files.
    cli.add_argument('--target-dir', default='/home/cys/www/data_vector_supplement', help='文件拷贝目标目录')

    opts = cli.parse_args()

    processor = CompareLogDb(
        host=opts.ch_host,
        port=opts.ch_port,
        user=opts.ch_user,
        password=opts.ch_password,
        database=opts.ch_db,
        target_dir=opts.target_dir
    )

    if opts.hour_num > 0:
        logger.info(f"从当前时间往前推{opts.hour_num}小时，包含以下日期及对应小时范围：")
        calculator = TimeRangeCalculator(4)
        ranges = calculator.get_time_ranges(opts.hour_num)
        logger.info(f"ranges:{ranges}")
        for item in ranges:
            logger.info(f"处理->日期：{item['date']}，开始小时：{item['start_hour']}，结束小时：{item['end_hour']}")
            processor.run(item['date'], item['start_hour'], item['end_hour'])
    else:
        processor.run(opts.date, opts.hour_start, opts.hour_end)

