#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import pymysql
from clickhouse_driver import Client
import logging
import time
import sys
from typing import List, Dict, Set
from datetime import datetime, timedelta

# Logging setup: mirror every message to both a log file and stdout.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
    handlers=[
        logging.FileHandler('aigc_article_traffic_update.log'),
        logging.StreamHandler()
    ]
)
logger = logging.getLogger(__name__)

# MySQL connection settings - targets the `gitcode` database.
# SECURITY NOTE(review): credentials are hard-coded in source; move them to
# environment variables or a secrets manager before this file is shared.
MYSQL_CONFIG = {
    'host': 'f966c4f186fe49dbb1a2b8566b6660c4in01.internal.cn-north-4.mysql.rds.myhuaweicloud.com',
    'port': 3306,
    'user': 'github_repo_article_rw',
    'password': 'xLmr0GCPzRDxok3jW0QB',
    'database': 'gitcode',
    'charset': 'utf8mb4',
    'autocommit': False,  # transactions are committed/rolled back explicitly per date
    'max_allowed_packet': 1073741824,  # 1GB
    'connect_timeout': 600,
    'read_timeout': 600,
    'write_timeout': 600
}

# ClickHouse connection settings (native TCP protocol on port 9000).
# SECURITY NOTE(review): credentials hard-coded here as well - see above.
CLICKHOUSE_CONFIG = {
    'host': '10.205.50.181',
    'port': 9000,
    'user': 'ck',
    'password': 'PpnXhU5C57DK',
    'database': 'default',
    'settings': {
        'connect_timeout': 600,
        'receive_timeout': 600,
        'send_timeout': 600,
        'keep_alive_timeout': 300,
        'tcp_keep_alive': True,
        'tcp_keep_alive_interval': 30,
        'tcp_keep_alive_probes': 5,
        'max_block_size': 50000,
        'max_threads': 4,
        'use_compression': False,
        'max_insert_block_size': 50000
    }
}

# Sync job tuning knobs.
SYNC_CONFIG = {
    'batch_size': 1000,         # rows per batch
    'max_retries': 3,           # max retry attempts for a failed ClickHouse query
    'retry_delay': 5,           # delay between retries (seconds)
    'start_date': '2023-08-01', # first date to process (inclusive)
    'end_date': '2025-08-13'    # last date to process (inclusive)
}

def get_mysql_connection():
    """Open and return a MySQL connection built from MYSQL_CONFIG.

    Any connection failure is logged and then re-raised to the caller.
    """
    try:
        conn = pymysql.connect(**MYSQL_CONFIG)
    except Exception as exc:
        logger.error(f"MySQL连接失败: {exc}")
        raise
    logger.info("MySQL连接成功 - 数据库: gitcode, 表: github_repo_article_rw")
    return conn

def get_clickhouse_client():
    """Create a ClickHouse client from CLICKHOUSE_CONFIG and verify it.

    A trivial `SELECT 1` is executed as a connectivity probe before the
    client is returned; failures are logged and re-raised.
    """
    try:
        ch = Client(**CLICKHOUSE_CONFIG)
        ch.execute("SELECT 1")
    except Exception as exc:
        logger.error(f"ClickHouse连接失败: {exc}")
        raise
    logger.info("ClickHouse连接成功")
    return ch

def generate_date_range():
    """Build the list of dates to process, newest first.

    Walks backwards one day at a time from the effective end date down to
    SYNC_CONFIG['start_date'] (inclusive), formatting each day as
    'YYYY-MM-DD'.

    Bug fix: SYNC_CONFIG['end_date'] was previously ignored and the range
    always ended at "today". The end bound is now the earlier of today and
    the configured end date; a future-dated config value therefore behaves
    exactly as before (capped at today), while a past-dated one is honored.

    Returns:
        list[str]: dates in descending order, e.g. ['2025-08-13', ...].
    """
    start_date = datetime.strptime(SYNC_CONFIG['start_date'], '%Y-%m-%d')
    # Midnight today, so string formatting is stable regardless of run time.
    today = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
    configured_end = datetime.strptime(SYNC_CONFIG['end_date'], '%Y-%m-%d')
    # Never generate future dates, but respect an earlier configured end.
    end_date = min(today, configured_end)

    dates = []
    current_date = end_date
    while current_date >= start_date:
        dates.append(current_date.strftime('%Y-%m-%d'))
        current_date -= timedelta(days=1)

    logger.info(f"生成日期范围: {len(dates)} 天，从 {end_date.strftime('%Y-%m-%d')} 到 {start_date.strftime('%Y-%m-%d')}")
    return dates

def fetch_clickhouse_data_by_date(client: Client, date: str) -> List[Dict]:
    """Fetch per-article traffic aggregates from ClickHouse for one upload date.

    Joins three aggregate CTEs onto the set of articles uploaded on `date`
    (status = 1): CSDN article PV/UV sums, GitCode project PV/UV referred
    from CSDN links, and user registrations attributed to articles. The
    whole query is retried up to SYNC_CONFIG['max_retries'] times with a
    SYNC_CONFIG['retry_delay']-second pause between attempts.

    Args:
        client: connected clickhouse_driver Client.
        date: upload date to filter on, 'YYYY-MM-DD'.

    Returns:
        One dict per article, keyed by the column names listed below.

    Raises:
        Exception: the last ClickHouse error, once all retries are exhausted.
    """
    for attempt in range(SYNC_CONFIG['max_retries']):
        try:
            # Build the aggregation query.
            # - %(date)s is bound by clickhouse_driver's parameter substitution;
            #   '%%' inside the LIKE pattern is an escaped literal '%'.
            # - NOTE(review): repo_aigc_article selects article_title twice;
            #   looks redundant - confirm ClickHouse tolerates the duplicate.
            # - NOTE(review): only article_view_data is restricted to `date`
            #   via repo_aigc_article; user_reg aggregates over all articles -
            #   presumably intentional, verify against requirements.
            query = """
            WITH repo_aigc_article AS (
                SELECT article_id, article_title, upload_at,
                concat('https://blog.csdn.net/', blog_own_name, '/article/details/', article_id) as blog_url,
                article_title, repo_id
                FROM ct259_ods_github_repo_article 
                WHERE status = 1 AND toDate(upload_at) = %(date)s
            ), article_view_data AS (
                SELECT article_id, min(pt) as first_pv_date,
                max(pt) AS last_pv_date,
                sum(total_pv) as total_pv_sum, sum(total_uv) as total_uv_sum,
                count(*) as record_count
                FROM ct259_ods_csdn_article_data_pro a 
                GLOBAL LEFT JOIN repo_aigc_article b ON toUInt64(a.article_id) = toUInt64(b.article_id)
                WHERE a.total_pv > 0 AND toUInt64(b.article_id) > 0
                GROUP BY a.article_id
            ), project_view_data AS (
                SELECT count(cid) as pv,
                count(DISTINCT cid) as uv,
                min(a.pt) AS first_pv_date,
                max(a.pt) AS last_pv_date,
                a.from_id as article_id
                FROM ct259_dws_project_nginx_browsing_history_report a
                GLOBAL LEFT JOIN repo_aigc_article b ON toUInt64(a.from_id) = toUInt64(b.article_id)
                WHERE toUInt64(b.article_id) > 0 AND a.`ref` like 'https://link.csdn.net%%' AND a.project_id = b.repo_id
                GROUP BY a.from_id
            ), user_reg AS (
                SELECT b.article_id as article_id,
                count(username) as user_num,
                min(b.pt) AS first_pv_date,
                max(b.pt) AS last_pv_date
                FROM ct259_ods_github_repo_article a
                GLOBAL LEFT JOIN ct259_dwd_user_full_info_wide b ON a.article_id = b.article_id
                WHERE b.article_id IS NOT NULL AND b.article_id != '' AND toUInt64(b.article_id) > 0
                GROUP BY article_id
            ) 
            SELECT a.article_id AS article_id,
                a.first_pv_date AS csdn_first_traffic_time,
                a.total_pv_sum AS article_cumulative_pv,
                a.total_uv_sum AS article_cumulative_uv,
                a.last_pv_date AS csdn_traffic_last_update_time,
                c.first_pv_date AS gitcode_first_traffic_time,
                c.pv AS project_cumulative_pv,
                c.uv AS project_cumulative_uv,
                c.last_pv_date AS gitcode_traffic_last_update_time,
                d.first_pv_date AS first_user_register_time,
                d.user_num AS cumulative_register_users,
                d.last_pv_date AS register_users_last_update_time
            FROM article_view_data a 
            GLOBAL LEFT JOIN repo_aigc_article b ON toUInt64(a.article_id) = toUInt64(b.article_id)
            GLOBAL LEFT JOIN project_view_data c ON toUInt64(a.article_id) = toUInt64(c.article_id)
            GLOBAL LEFT JOIN user_reg d ON toUInt64(a.article_id) = toUInt64(d.article_id)
            """
            
            logger.info(f"执行ClickHouse查询: 日期 {date}")
            start_time = time.time()
            result = client.execute(query, {'date': date})
            query_time = time.time() - start_time
            
            # Zip each result tuple with the SELECT column order above to
            # produce dicts; order here MUST match the final SELECT list.
            columns = [
                'article_id', 'csdn_first_traffic_time', 'article_cumulative_pv', 'article_cumulative_uv',
                'csdn_traffic_last_update_time', 'gitcode_first_traffic_time', 'project_cumulative_pv',
                'project_cumulative_uv', 'gitcode_traffic_last_update_time', 'first_user_register_time',
                'cumulative_register_users', 'register_users_last_update_time'
            ]
            
            data = []
            for row in result:
                data.append(dict(zip(columns, row)))
            
            logger.info(f"从ClickHouse获取到 {len(data)} 条数据，日期: {date}，查询耗时: {query_time:.2f}秒")
            return data
            
        except Exception as e:
            # Retry on any failure (network, timeout, server error alike).
            logger.warning(f"从ClickHouse获取数据失败（第{attempt + 1}次）: {e}")
            if attempt < SYNC_CONFIG['max_retries'] - 1:
                time.sleep(SYNC_CONFIG['retry_delay'])
                logger.info("等待重试...")
            else:
                logger.error(f"从ClickHouse获取数据失败，已达到最大重试次数")
                raise
def _normalize_row(row: Dict) -> Dict:
    """Coerce one ClickHouse result row into MySQL-ready parameter values.

    - 'article_id' is stringified (None stays None).
    - '*_time' string values are parsed into datetime objects: ISO format
      (with optional trailing 'Z') or plain 'YYYY-MM-DD'; unparseable
      strings become None. Non-string time values pass through unchanged.
    - every other key/value passes through unchanged.
    """
    processed = {}
    for key, value in row.items():
        if key == 'article_id':
            processed[key] = str(value) if value is not None else None
        elif key.endswith('_time') and value:
            if isinstance(value, str):
                try:
                    if 'T' in value:
                        # ISO format, e.g. 2025-08-01T00:00:00 (optionally with Z)
                        processed[key] = datetime.fromisoformat(value.replace('Z', '+00:00'))
                    else:
                        # Plain date, e.g. 2025-08-01
                        processed[key] = datetime.strptime(value, '%Y-%m-%d')
                except ValueError:
                    processed[key] = None
            else:
                processed[key] = value
        else:
            processed[key] = value
    return processed

def update_mysql_aigc_article_data(connection: pymysql.Connection, data: List[Dict]) -> None:
    """Write one batch of ClickHouse traffic aggregates into MySQL.

    Issues one parameterized UPDATE per row against `github_repo_article`,
    keyed by article_id. Rows that fail are logged and skipped. The
    transaction is NOT committed here - the caller owns commit/rollback.

    Bug fixes vs. previous version:
      * The WHERE clause quoted the placeholder ('%(article_id)s'); pymysql
        quotes/escapes bound values itself, so the extra quotes produced
        broken SQL that could never match a row. Quotes removed.
      * The article_id -> str conversion was immediately overwritten because
        the time-field check was a separate `if` whose `else` branch
        reassigned the raw value; normalization now uses `elif`
        (see _normalize_row).
    """
    if not data:
        logger.info("没有数据需要更新")
        return
    
    try:
        with connection.cursor() as cursor:
            # Per-row parameterized UPDATE; pymysql binds the %(name)s values.
            update_sql = """
            UPDATE github_repo_article SET
                csdn_first_traffic_time = %(csdn_first_traffic_time)s,
                article_cumulative_pv = %(article_cumulative_pv)s,
                article_cumulative_uv = %(article_cumulative_uv)s,
                csdn_traffic_last_update_time = %(csdn_traffic_last_update_time)s,
                gitcode_first_traffic_time = %(gitcode_first_traffic_time)s,
                project_cumulative_pv = %(project_cumulative_pv)s,
                project_cumulative_uv = %(project_cumulative_uv)s,
                gitcode_traffic_last_update_time = %(gitcode_traffic_last_update_time)s,
                first_user_register_time = %(first_user_register_time)s,
                cumulative_register_users = %(cumulative_register_users)s,
                register_users_last_update_time = %(register_users_last_update_time)s
            WHERE article_id = %(article_id)s
            """
            
            start_time = time.time()
            updated_count = 0
            
            for row in data:
                try:
                    cursor.execute(update_sql, _normalize_row(row))
                    updated_count += 1
                except Exception as e:
                    # Best-effort batch: log the bad row and keep going.
                    logger.error(f"更新文章ID {row.get('article_id', 'unknown')} 失败: {e}")
                    continue
            
            update_time = time.time() - start_time
            logger.info(f"更新 {updated_count} 条数据成功，耗时: {update_time:.2f}秒")
        
    except Exception as e:
        logger.error(f"更新MySQL数据失败: {e}")
        raise

def sync_aigc_article_traffic():
    """Main driver: sync AIGC article traffic data, one date at a time.

    For each date produced by generate_date_range() (newest first):
    query ClickHouse, apply the rows to MySQL, and commit. Each date is its
    own transaction - a failure rolls back that date only and processing
    continues with the next date. Both connections are always closed in the
    `finally` block.
    """
    mysql_conn = None
    clickhouse_client = None
    
    try:
        # Acquire both connections up front; either failure aborts the run.
        mysql_conn = get_mysql_connection()
        clickhouse_client = get_clickhouse_client()
        
        # Dates to process, in descending order.
        dates = generate_date_range()
        
        total_processed = 0
        start_time = time.time()
        
        for i, date in enumerate(dates):
            date_start_time = time.time()
            logger.info(f"开始处理日期 {date} ({i+1}/{len(dates)})")
            
            try:
                # Pull this date's aggregates from ClickHouse.
                data = fetch_clickhouse_data_by_date(clickhouse_client, date)
                
                if data:
                    # Apply to MySQL; commit only after the whole date succeeds.
                    update_mysql_aigc_article_data(mysql_conn, data)
                    total_processed += len(data)
                    
                    # Commit this date's transaction.
                    mysql_conn.commit()
                    logger.info(f"日期 {date} 事务已提交")
                    
                    date_time = time.time() - date_start_time
                    elapsed_time = time.time() - start_time
                    avg_time_per_record = elapsed_time / total_processed if total_processed > 0 else 0
                    
                    logger.info(f"日期 {date} 处理完成，已处理 {total_processed} 条数据")
                    logger.info(f"本日期耗时: {date_time:.2f}秒，平均每条记录: {avg_time_per_record:.3f}秒")
                    
                else:
                    logger.info(f"日期 {date} 没有数据")
                
                # Brief pause between dates to avoid hammering the servers.
                time.sleep(0.5)
                
            except Exception as e:
                # Per-date failure: roll back this date and move on.
                logger.error(f"处理日期 {date} 失败: {e}")
                mysql_conn.rollback()
                logger.info(f"日期 {date} 事务已回滚")
                continue
        
        total_time = time.time() - start_time
        logger.info(f"所有日期数据同步完成，总共处理 {total_processed} 条数据")
        logger.info(f"总耗时: {total_time:.2f}秒，平均速度: {total_processed/total_time:.2f}条/秒")
            
    except Exception as e:
        # Setup-level failure (connections, date generation): abort the run.
        logger.error(f"数据同步失败: {e}")
        if mysql_conn:
            mysql_conn.rollback()
            logger.info("已回滚事务")
        raise
    finally:
        # Always release both connections, even on failure.
        if mysql_conn:
            mysql_conn.close()
            logger.info("MySQL连接已关闭")
        if clickhouse_client:
            clickhouse_client.disconnect()
            logger.info("ClickHouse连接已关闭")

def _main() -> None:
    """Script entry point: run the sync job and map outcomes to exit codes."""
    logger.info("开始AIGC文章流量数据同步任务")
    logger.info(f"目标数据库: {MYSQL_CONFIG['database']}, 目标表: github_repo_article_rw")
    
    try:
        sync_aigc_article_traffic()
        logger.info("AIGC文章流量数据同步任务完成")
    except KeyboardInterrupt:
        logger.info("用户中断任务")
        sys.exit(1)
    except Exception as e:
        logger.error(f"AIGC文章流量数据同步任务失败: {e}")
        sys.exit(1)


if __name__ == "__main__":
    _main()