import asyncio
import json
import logging
import os
import re
import threading
import uuid
from concurrent.futures import ThreadPoolExecutor, as_completed
from datetime import datetime
from enum import Enum
from typing import Any, Dict, Optional

import uvicorn
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
from qwen_agent.agents import Assistant

# --- Logging configuration --------------------------------------------------
# All records go both to logs/client.log (UTF-8) and to the console.
log_directory = "logs"
# exist_ok avoids the check-then-create race of the previous LBYL pattern.
os.makedirs(log_directory, exist_ok=True)
log_file_path = os.path.join(log_directory, "client.log")

logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    handlers=[
        logging.FileHandler(log_file_path, encoding='utf-8'),
        logging.StreamHandler()
    ]
)
logger = logging.getLogger(__name__)

# FastAPI application exposing the report-generation endpoints defined below.
app = FastAPI(
    title="MCP API Service",
    description="API service for MCP report generation and updates",
    version="1.0.0"
)

class TaskStatus(str, Enum):
    """Lifecycle states of a report-generation task.

    Inherits from ``str`` so values serialize directly in JSON responses.
    """
    PENDING = "pending"      # created, not yet picked up by a worker
    RUNNING = "running"      # segments are being processed
    COMPLETED = "completed"  # result is available in the task entry
    FAILED = "failed"        # error message recorded in the task entry

class ReportRequest(BaseModel):
    """Request body for the full-update and async-update endpoints."""
    # Parameters shared by every segment of the report.
    common_params: dict
    # Report segments; each is sent to the agent independently and in parallel.
    content_segments: list

class TaskResponse(BaseModel):
    """Response returned when an asynchronous task is created."""
    # Unique id used to poll /api/reports/task-status/{task_id}.
    task_id: str
    status: TaskStatus
    # Human-readable status message.
    message: str

class TaskStatusResponse(BaseModel):
    """Snapshot of a task's state as returned by the status endpoint."""
    task_id: str
    status: TaskStatus
    # Completion percentage, 0-100.
    progress: Optional[int] = None
    # Final payload, present once status is COMPLETED.
    result: Optional[Dict[str, Any]] = None
    # Error message, present once status is FAILED.
    error: Optional[str] = None
    # ISO-8601 timestamps (server local time, per datetime.now() usage below).
    created_at: Optional[str] = None
    completed_at: Optional[str] = None

# Global in-memory task store (task_id -> task state dict). Guarded by
# tasks_lock because request handlers and background worker threads mutate
# entries concurrently.
tasks_storage: Dict[str, Dict[str, Any]] = {}
tasks_lock = threading.Lock()

def init_agent_service():
    """Build and return a fresh report-update Assistant wired to the MCP server.

    The LLM configuration comes from the package-local ``config`` module
    (falling back to a top-level import when run as a plain script), and the
    system prompt is read from ``system_prompt.txt`` next to this file.
    """
    try:
        from .config import llm_cfg
    except ImportError:
        from config import llm_cfg

    # MCP server endpoint the agent's tools talk to.
    mcp_tools = [{
        "mcpServers": {
            "xinjinchengmcp": {
                "url": "http://localhost:8000/sse"
            }
        }
    }]

    prompt_path = os.path.join(os.path.dirname(__file__), 'system_prompt.txt')
    with open(prompt_path, 'r', encoding='utf-8') as prompt_file:
        prompt_text = prompt_file.read()

    return Assistant(
        llm=llm_cfg,
        name='报告更新专家',
        description='报告更新专家专注于通过智能工具动态生成准确的报告内容，确保数据始终保持最新状态。',
        system_message=prompt_text,
        function_list=mcp_tools,
    )

def process_single_segment(segment, common_params, segment_index):
    """Run the agent on one content segment and return the updated segment.

    Args:
        segment: One entry of the request's ``content_segments`` list.
        common_params: Parameters shared by every segment of the report.
        segment_index: Position in the original list, echoed back so the
            caller can reassemble results regardless of completion order.

    Returns:
        ``(segment_index, updated_segment)``. On any failure — including an
        empty agent response or unparsable output — the original segment is
        returned unchanged (best-effort semantics, never raises).
    """
    try:
        bot = init_agent_service()

        # Each agent call receives a single-segment request payload.
        segment_request_data = {
            "common_params": common_params,
            "content_segments": [segment]
        }
        messages = [{'role': 'user', 'content': [{'text': json.dumps(segment_request_data, ensure_ascii=False)}]}]

        # Drain the stream; the last yielded value holds the full message list.
        final_response = None
        for response in bot.run(messages):
            if response:
                final_response = response

        if not final_response:
            # Previously an empty response fell through to an IndexError that
            # was logged as an error; treat it explicitly as "no update".
            logger.warning(f"Empty agent response for segment {segment_index}")
            return segment_index, segment

        logger.info(f"Final response from agent for segment {segment_index}: {final_response}")
        content = final_response[-1]['content']

        # The agent may wrap its JSON in a markdown fence; tolerate optional
        # whitespace/CRLF around the payload (the old pattern required "\n").
        match = re.search(r"```json\s*(.*?)\s*```", content, re.DOTALL)
        json_text = match.group(1).strip() if match else content

        content_json = json.loads(json_text)
        if content_json.get('content_segments'):
            return segment_index, content_json['content_segments'][0]
        return segment_index, segment

    except Exception as e:
        # Best-effort: one bad segment must never fail the whole report.
        logger.error(f"Error processing segment {segment_index}: {e}", exc_info=True)
        return segment_index, segment

def process_segments_parallel(request: ReportRequest, task_id: str):
    """Process every segment of *request* concurrently and record the outcome.

    Mutates ``tasks_storage[task_id]`` in place: RUNNING with 0-100 progress
    while working, then COMPLETED with the assembled result, or FAILED with
    the error message. Per-segment failures already fall back to the original
    segment inside process_single_segment, so only infrastructure errors mark
    the whole task FAILED.
    """
    try:
        with tasks_lock:
            tasks_storage[task_id]['status'] = TaskStatus.RUNNING
            tasks_storage[task_id]['progress'] = 0

        total = len(request.content_segments)
        updated_segments = [None] * total

        with ThreadPoolExecutor(max_workers=5) as executor:
            # Submit one job per segment, remembering its original position.
            future_to_index = {
                executor.submit(process_single_segment, segment, request.common_params, i): i
                for i, segment in enumerate(request.content_segments)
            }

            completed_count = 0
            # as_completed yields futures as they finish, so progress advances
            # immediately instead of blocking on futures in submission order.
            for future in as_completed(future_to_index):
                try:
                    segment_index, processed_segment = future.result()
                    updated_segments[segment_index] = processed_segment
                except Exception as e:
                    # Fall back to the untouched input segment on failure.
                    logger.error(f"Error in future result: {e}", exc_info=True)
                    segment_index = future_to_index[future]
                    updated_segments[segment_index] = request.content_segments[segment_index]

                # Count failed futures too, so progress always reaches 100.
                completed_count += 1
                with tasks_lock:
                    tasks_storage[task_id]['progress'] = int((completed_count / total) * 100)

        final_result = {"content_segments": updated_segments}

        with tasks_lock:
            tasks_storage[task_id]['status'] = TaskStatus.COMPLETED
            tasks_storage[task_id]['result'] = final_result
            tasks_storage[task_id]['completed_at'] = datetime.now().isoformat()
            tasks_storage[task_id]['progress'] = 100

        logger.info(f"Task {task_id} completed successfully")

    except Exception as e:
        logger.error(f"Error in parallel processing for task {task_id}: {e}", exc_info=True)
        with tasks_lock:
            tasks_storage[task_id]['status'] = TaskStatus.FAILED
            tasks_storage[task_id]['error'] = str(e)
            tasks_storage[task_id]['completed_at'] = datetime.now().isoformat()

@app.post("/api/reports/full-update")
def run_query(request: ReportRequest):
    """Synchronous processing endpoint (kept for backward compatibility).

    Registers a throwaway task entry, runs the parallel pipeline to completion
    on the request thread, returns the result (or raises HTTP 500), and
    removes the temporary entry from the task store either way.
    """
    logger.info(f"Received synchronous request: {request.model_dump_json()}")

    # Temporary task entry so the shared pipeline has somewhere to record state.
    task_id = str(uuid.uuid4())
    initial_state = {
        'status': TaskStatus.PENDING,
        'progress': 0,
        'result': None,
        'error': None,
        'created_at': datetime.now().isoformat(),
        'completed_at': None
    }
    with tasks_lock:
        tasks_storage[task_id] = initial_state

    # Blocks until every segment has been processed.
    process_segments_parallel(request, task_id)

    # pop() both reads and cleans up the temporary entry.
    with tasks_lock:
        task_data = tasks_storage.pop(task_id)

    if task_data['status'] != TaskStatus.COMPLETED:
        error_msg = task_data.get('error', 'Unknown error')
        raise HTTPException(status_code=500, detail=f"Processing failed: {error_msg}")

    result = task_data['result']
    logger.info(f"Returning synchronous result: {result}")
    return result

@app.post("/api/reports/async-update", response_model=TaskResponse)
def create_async_task(request: ReportRequest):
    """Create an asynchronous report-update task.

    Registers a PENDING entry in the task store, starts the processing
    pipeline on a daemon background thread, and immediately returns the task
    id so callers can poll /api/reports/task-status/{task_id}.
    """
    logger.info(f"Received async request: {request.model_dump_json()}")

    # Unique id callers use to poll for status/result.
    task_id = str(uuid.uuid4())

    with tasks_lock:
        tasks_storage[task_id] = {
            'status': TaskStatus.PENDING,
            'progress': 0,
            'result': None,
            'error': None,
            'created_at': datetime.now().isoformat(),
            'completed_at': None
        }

    # `threading` is already imported at module level; the previous local
    # re-import was redundant. Daemon thread so it never blocks shutdown.
    thread = threading.Thread(target=process_segments_parallel, args=(request, task_id))
    thread.daemon = True
    thread.start()

    logger.info(f"Created async task: {task_id}")
    return TaskResponse(
        task_id=task_id,
        status=TaskStatus.PENDING,
        message="任务已创建，正在处理中"
    )

@app.get("/api/reports/task-status/{task_id}", response_model=TaskStatusResponse)
def get_task_status(task_id: str):
    """Return the current status snapshot of a task, or HTTP 404 if unknown."""
    with tasks_lock:
        # EAFP: a missing id is the exceptional case here.
        try:
            task_data = tasks_storage[task_id]
        except KeyError:
            raise HTTPException(status_code=404, detail="任务不存在")

        return TaskStatusResponse(
            task_id=task_id,
            status=task_data['status'],
            progress=task_data['progress'],
            result=task_data['result'],
            error=task_data['error'],
            created_at=task_data['created_at'],
            completed_at=task_data['completed_at']
        )

if __name__ == '__main__':
    # Run the API with uvicorn when executed directly; listens on all
    # interfaces, port 8080 (the MCP tool server itself is expected on 8000).
    uvicorn.run(app, host="0.0.0.0", port=8080)