import logging
import asyncio
from datetime import datetime, timedelta
from sqlalchemy import select, func
from app.db.session import get_postgres_db, get_clickhouse_client
from app.models.task import Task
import pandas as pd
import numpy as np

logger = logging.getLogger(__name__)

async def sync_tasks_to_clickhouse():
    """Sync recent task rows from PostgreSQL into ClickHouse.

    Reads tasks created within the last 24 hours from PostgreSQL and
    flattens them into plain dicts suitable for bulk insertion.  The
    actual ClickHouse write is temporarily disabled, so the function
    currently only logs what would have been synced.

    Raises:
        Exception: re-raised after logging if the PostgreSQL read fails.
    """
    try:
        # Only sync tasks created within the last day.
        # NOTE(review): naive local time — confirm DB timestamps use the
        # same timezone assumption before re-enabling the real sync.
        yesterday = datetime.now() - timedelta(days=1)

        # Fetch candidate rows from PostgreSQL.
        async with get_postgres_db() as db:
            query = select(Task).where(Task.created_at >= yesterday)
            result = await db.execute(query)
            tasks = result.scalars().all()

            if not tasks:
                logger.info("没有新的任务数据需要同步")
                return

            logger.info("找到 %s 条需要同步的任务", len(tasks))

            # Flatten ORM objects into plain dicts; optional fields are
            # normalized to safe defaults ('' / now() / 0).
            data = [
                {
                    'id': task.id,
                    'title': task.title,
                    'description': task.description or '',
                    'status': task.status.value,
                    'created_at': task.created_at,
                    'updated_at': task.updated_at or datetime.now(),
                    'user_id': task.user_id or 0,
                }
                for task in tasks
            ]

        # ClickHouse sync is temporarily disabled: log instead of writing.
        # (The DataFrame build was dropped too — it was only needed by the
        # disabled insert below and was wasted work on every run.)
        logger.info("ClickHouse 同步暂时禁用，待同步任务数量: %s", len(tasks))
        logger.info("任务数据示例: %s", data[0] if data else 'None')

        # Disabled ClickHouse write path, kept for when sync is re-enabled:
        #
        # df = pd.DataFrame(data)
        # async with get_clickhouse_client() as clickhouse:
        #     # Create the destination table on first use.
        #     clickhouse.execute('''
        #         CREATE TABLE IF NOT EXISTS tasks (
        #             id UInt32,
        #             title String,
        #             description String,
        #             status String,
        #             created_at DateTime,
        #             updated_at DateTime,
        #             user_id UInt32
        #         ) ENGINE = MergeTree()
        #         ORDER BY (created_at, id)
        #     ''')
        #
        #     # Bulk insert.
        #     clickhouse.execute(
        #         '''
        #         INSERT INTO tasks (id, title, description, status, created_at, updated_at, user_id)
        #         VALUES
        #         ''',
        #         df.to_dict('records')
        #     )

        logger.info("任务数据已记录，跳过 ClickHouse 同步")

    except Exception as e:
        # Log with traceback, then propagate so callers/schedulers see it.
        logger.exception("同步任务数据到 ClickHouse 失败: %s", e)
        raise

async def get_task_analytics():
    """Return task analytics data.

    ClickHouse is temporarily disabled, so this returns fixed mock data
    in the same shape the real queries would produce:

        {
            "status_distribution": [{"status": str, "count": int}, ...],
            "daily_creation": [{"date": "YYYY-MM-DD", "count": int}, ...],
            "avg_completion_time_minutes": int,
        }

    Raises:
        Exception: re-raised after logging on unexpected failure.
    """
    try:
        logger.info("返回模拟的任务分析数据（ClickHouse 暂时禁用）")

        # Mock status distribution.
        status_counts = [
            ('pending', 3),
            ('in_progress', 2),
            ('completed', 4),
            ('failed', 1),
        ]

        # Mock per-day creation counts for the last 7 days (today first),
        # each date truncated to midnight.
        today = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
        daily_counts = [(today - timedelta(days=i), i + 1) for i in range(7)]

        # Mock average completion time: 2 hours, expressed in minutes.
        avg_completion_time = 120

        return {
            "status_distribution": [{"status": s, "count": c} for s, c in status_counts],
            "daily_creation": [{"date": d.strftime("%Y-%m-%d"), "count": c} for d, c in daily_counts],
            "avg_completion_time_minutes": avg_completion_time,
        }

        # Disabled real implementation, kept for when ClickHouse returns
        # (previously an unreachable string literal after the return):
        #
        # async with get_clickhouse_client() as clickhouse:
        #     # Task counts per status.
        #     status_counts = clickhouse.execute('''
        #         SELECT status, count() as count
        #         FROM tasks
        #         GROUP BY status
        #         ORDER BY count DESC
        #     ''')
        #
        #     # Task creation counts per day, newest first.
        #     daily_counts = clickhouse.execute('''
        #         SELECT
        #             toDate(created_at) as date,
        #             count() as count
        #         FROM tasks
        #         GROUP BY date
        #         ORDER BY date DESC
        #         LIMIT 30
        #     ''')
        #
        #     # Average time from creation to completion, in minutes.
        #     avg_completion_time = clickhouse.execute('''
        #         SELECT avg(dateDiff('minute', created_at, updated_at)) as avg_minutes
        #         FROM tasks
        #         WHERE status = 'completed'
        #     ''')
        #
        # return {
        #     "status_distribution": [{"status": s, "count": c} for s, c in status_counts],
        #     "daily_creation": [{"date": d.strftime("%Y-%m-%d"), "count": c} for d, c in daily_counts],
        #     "avg_completion_time_minutes": avg_completion_time[0][0] if avg_completion_time else 0
        # }

    except Exception as e:
        # Log with traceback, then propagate to the caller.
        logger.exception("获取任务分析数据失败: %s", e)
        raise

# Periodic sync job.
async def schedule_sync_tasks(interval_seconds: int = 3600):
    """Periodically sync task data from PostgreSQL to ClickHouse.

    Runs forever: performs one sync attempt, logs (but survives) any
    failure so one bad cycle cannot kill the loop, then sleeps before
    the next attempt.  Intended to be run as a background asyncio task;
    cancellation (asyncio.CancelledError) propagates normally.

    Args:
        interval_seconds: Delay between sync attempts in seconds
            (default 3600, i.e. once per hour — matches previous
            hard-coded behavior).
    """
    while True:
        try:
            await sync_tasks_to_clickhouse()
        except Exception as e:
            # Log with traceback; deliberately swallow so the loop continues.
            logger.exception("定时同步任务失败: %s", e)

        await asyncio.sleep(interval_seconds)