import logging
from sqlalchemy import text
from app.db.session import get_citusdb, get_clickhouse_client, cache_config
from app.core.config import settings
import asyncio
import pandas as pd
import json
from aiocache import cached
from datetime import datetime, timedelta
import random

logger = logging.getLogger(__name__)

@cached(**cache_config, key_builder=lambda *args, **kwargs: f"task_performance_{kwargs.get('days', 30)}")
async def get_task_performance_metrics(days: int = 30):
    """
    Return task performance metrics, grouped by task status.

    NOTE: currently returns simulated data (the CitusDB query implied by the
    imports is not wired up yet). One record is produced per status with
    randomized task counts and completion-time statistics.

    Args:
        days: lookback window in days; only used for the cache key today.

    Returns:
        list[dict]: one dict per status with count and min/avg/max/median
        completion times in seconds.
    """
    try:
        logger.info(f"返回模拟的任务性能指标数据（{days}天）")

        metrics = []
        for status_name in ('pending', 'in_progress', 'completed', 'failed'):
            # Draw values in the same order so a seeded RNG reproduces
            # exactly what the previous implementation produced.
            count = random.randint(5, 20)
            mean_secs = random.randint(1800, 7200)      # between 30 min and 2 h
            fastest = mean_secs - random.randint(600, 1200)
            slowest = mean_secs + random.randint(1200, 3600)

            metrics.append({
                "status": status_name,
                "task_count": count,
                "avg_completion_time_seconds": mean_secs,
                "min_completion_time_seconds": fastest,
                "max_completion_time_seconds": slowest,
                # Synthetic median: midpoint of the simulated extremes.
                "median_completion_time_seconds": (fastest + slowest) // 2,
            })

        return metrics

    except Exception as e:
        logger.error(f"获取任务性能指标失败: {str(e)}")
        raise

@cached(**cache_config)
async def get_combined_analytics():
    """
    Produce a combined analytics payload (performance, engagement,
    status-transition sections).

    NOTE: currently assembled from simulated data; the CitusDB/ClickHouse
    combination described by the surrounding imports is not implemented yet.

    Returns:
        dict with keys ``performance_metrics``, ``user_engagement`` and
        ``status_transition``.
    """
    try:
        logger.info("返回模拟的综合分析数据")

        # Simulated per-status performance section (delegates to the
        # sibling coroutine, which shares this module's mock strategy).
        performance_metrics = await get_task_performance_metrics()

        # Simulated engagement rows for ten users. Random draws happen in
        # the same order as before so seeded runs stay reproducible.
        user_engagement = []
        for uid in range(1, 11):
            tasks_done = random.randint(5, 30)
            started = datetime.now() - timedelta(days=random.randint(30, 90))
            finished = datetime.now() - timedelta(days=random.randint(0, 10))

            user_engagement.append({
                "user_id": uid,
                "task_count": tasks_done,
                "first_task_date": started.strftime("%Y-%m-%d"),
                "last_task_date": finished.strftime("%Y-%m-%d"),
                "active_days": (finished - started).days,
            })

        # Simulated status-transition section, one row per status.
        status_transition = [
            {
                "status": st,
                "count": random.randint(5, 20),
                "avg_hours_to_status": random.randint(1, 48),
            }
            for st in ('pending', 'in_progress', 'completed', 'failed')
        ]

        return {
            "performance_metrics": performance_metrics,
            "user_engagement": user_engagement,
            "status_transition": status_transition,
        }

    except Exception as e:
        logger.error(f"获取综合分析数据失败: {str(e)}")
        raise

async def generate_analytics_report(days: int = 30, format: str = "json",
                                    output_path: str = "analytics_report.xlsx"):
    """
    Generate an analytics report from the combined analytics data.

    Args:
        days: lookback window in days.
            NOTE(review): currently unused — ``get_combined_analytics()``
            takes no arguments; TODO forward it once that function accepts one.
        format: ``"json"`` (default) or ``"csv"``.
            NOTE(review): the "csv" branch actually writes a multi-sheet
            Excel workbook, not CSV — confirm whether callers rely on this.
        output_path: destination file for the workbook branch
            (previously hard-coded to ``"analytics_report.xlsx"``).

    Returns:
        str: the workbook path when ``format`` is "csv", otherwise the
        analytics payload serialized as a JSON string.

    Raises:
        Exception: re-raised after logging if data retrieval or export fails.
    """
    try:
        # Fetch the combined dataset (performance / engagement / transitions).
        analytics_data = await get_combined_analytics()

        if format.lower() == "csv":
            # One DataFrame per report section.
            performance_df = pd.DataFrame(analytics_data["performance_metrics"])
            user_df = pd.DataFrame(analytics_data["user_engagement"])
            status_df = pd.DataFrame(analytics_data["status_transition"])

            # Write all sections into a single Excel workbook; the context
            # manager guarantees the file is closed even on write errors.
            with pd.ExcelWriter(output_path) as writer:
                performance_df.to_excel(writer, sheet_name="Performance", index=False)
                user_df.to_excel(writer, sheet_name="User Engagement", index=False)
                status_df.to_excel(writer, sheet_name="Status Transition", index=False)

            return output_path
        else:
            # JSON fallback; ensure_ascii=False keeps any non-ASCII text
            # (e.g. Chinese labels) readable instead of \u-escaped.
            return json.dumps(analytics_data, indent=2, ensure_ascii=False)

    except Exception as e:
        logger.error(f"生成分析报告失败: {str(e)}")
        raise