"""
LangGraph API端点
提供工作流编排的LLM接口
"""
import json
from datetime import datetime
from typing import List, Optional

from fastapi import APIRouter, Depends, HTTPException, Query
from fastapi.responses import StreamingResponse
from sqlalchemy.orm import Session

from app.api.deps import get_db, get_current_user
from app.graphs.core import WorkflowConfig
from app.graphs.service import langgraph_service
from app.models.user import User

router = APIRouter()


@router.get("/workflows")
async def get_available_workflows():
    """List the workflow types the service can run.

    Returns:
        dict: ``workflows`` — the list from the service — and ``count``,
        the number of entries in that list.
    """
    available = await langgraph_service.list_available_workflows()
    return {"workflows": available, "count": len(available)}


@router.get("/workflows/{workflow_type}/config")
async def get_workflow_config(workflow_type: str):
    """Return the configuration for one workflow type.

    Raises:
        HTTPException: 404 when the service has no config for the type.
    """
    cfg = await langgraph_service.get_workflow_config(workflow_type)
    if not cfg:
        raise HTTPException(status_code=404, detail="工作流不存在")
    return cfg


@router.post("/conversations/workflow")
async def create_workflow_conversation(
    title: str,
    model_id: int,
    workflow_type: str = Query("simple_chat", description="工作流类型"),
    db: Session = Depends(get_db),
    current_user: User = Depends(get_current_user)
):
    """Create a conversation bound to a workflow type for the current user.

    Returns:
        The service's result object for the new conversation.

    Raises:
        HTTPException: 500 when the service call fails for any reason.
    """
    try:
        return await langgraph_service.create_conversation_with_workflow(
            db=db,
            user_id=current_user.id,
            title=title,
            model_id=model_id,
            workflow_type=workflow_type,
        )
    except Exception as exc:
        raise HTTPException(status_code=500, detail=str(exc))


@router.post("/chat/workflow")
async def chat_with_workflow(
    conversation_id: int,
    message: str,
    workflow_type: str = Query("simple_chat", description="工作流类型"),
    stream: bool = Query(False, description="是否流式响应"),
    db: Session = Depends(get_db),
    current_user: User = Depends(get_current_user)
):
    """Send a message to a conversation through the selected workflow.

    With ``stream=True`` the endpoint responds with Server-Sent Events,
    each event a JSON object ``{"content": ..., "is_complete": ...}``
    terminated by a final event with ``is_complete: true``. Otherwise it
    returns the service's result in a single response.

    Raises:
        HTTPException: 404 when the service raises ValueError,
            500 on any other failure before the response starts.
    """
    try:
        if stream:
            # Streaming response (SSE).
            async def generate_stream():
                # NOTE(review): the service call is awaited and its result is
                # async-iterated — presumably it returns an async generator
                # when stream=True; confirm against the service implementation.
                async for chunk in await langgraph_service.process_message_with_workflow(
                    db=db,
                    conversation_id=conversation_id,
                    user_message=message,
                    workflow_type=workflow_type,
                    stream=True
                ):
                    yield f"data: {json.dumps({'content': chunk, 'is_complete': False})}\n\n"
                
                # Final sentinel event marking end-of-stream for the client.
                yield f"data: {json.dumps({'content': '', 'is_complete': True})}\n\n"
            
            # NOTE(review): generate_stream() runs after this handler returns,
            # so exceptions raised mid-stream are NOT converted by the except
            # clauses below — the client just sees a truncated stream.
            return StreamingResponse(
                generate_stream(),
                media_type="text/event-stream",
                headers={
                    "Cache-Control": "no-cache",
                    "Connection": "keep-alive",
                }
            )
        
        else:
            # Non-streaming: wait for the full result and return it as-is.
            result = await langgraph_service.process_message_with_workflow(
                db=db,
                conversation_id=conversation_id,
                user_message=message,
                workflow_type=workflow_type,
                stream=False
            )
            
            return result
    
    except ValueError as e:
        # ValueError from the service is mapped to a 404 response.
        raise HTTPException(status_code=404, detail=str(e))
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))


@router.get("/workflows/{workflow_id}/status")
async def get_workflow_status(
    workflow_id: str,
    current_user: User = Depends(get_current_user)
):
    """Return the current status of a running workflow.

    Raises:
        HTTPException: 404 when the workflow id is unknown to the service.
    """
    current = await langgraph_service.get_workflow_status(workflow_id)
    if not current:
        raise HTTPException(status_code=404, detail="工作流不存在")
    return current


@router.delete("/workflows/{workflow_id}")
async def cancel_workflow(
    workflow_id: str,
    current_user: User = Depends(get_current_user)
):
    """Cancel a running workflow by id.

    Raises:
        HTTPException: 404 when the service does not know the workflow id.
    """
    cancelled = await langgraph_service.cancel_workflow(workflow_id)
    if not cancelled:
        raise HTTPException(status_code=404, detail="工作流不存在")
    return {"message": "工作流已取消"}


@router.post("/workflows/benchmark")
async def benchmark_workflows(
    message: str,
    model_id: int,
    workflow_types: List[str] = Query(["simple_chat", "tool_enhanced"], description="要测试的工作流类型"),
    db: Session = Depends(get_db),
    current_user: User = Depends(get_current_user)
):
    """Run the same message through several workflow types and compare them.

    Creates a throwaway conversation, executes each requested workflow type
    against it while recording wall-clock time, token counts and estimated
    cost, then deletes the conversation (even when an execution fails).

    Returns:
        dict: ``benchmark_results`` (one entry per workflow type, either
        metrics or an ``error``), plus the echoed ``message`` and ``model_id``.

    Raises:
        HTTPException: 500 on any unexpected failure.
    """
    try:
        # Temporary conversation used only for this benchmark run.
        from app.models.chat_conversation import ChatConversation

        conversation = ChatConversation(
            user_id=current_user.id,
            title="性能测试",
            model_id=model_id
        )
        db.add(conversation)
        db.commit()
        db.refresh(conversation)

        benchmark_results = []

        try:
            for workflow_type in workflow_types:
                try:
                    # Wall-clock timing around the full workflow execution.
                    # (Requires `from datetime import datetime` at module top —
                    # previously missing, which made every run fail.)
                    start_time = datetime.now()

                    result = await langgraph_service.process_message_with_workflow(
                        db=db,
                        conversation_id=conversation.id,
                        user_message=message,
                        workflow_type=workflow_type,
                        stream=False
                    )

                    execution_time = (datetime.now() - start_time).total_seconds()

                    benchmark_results.append({
                        "workflow_type": workflow_type,
                        "success": result["success"],
                        "execution_time": execution_time,
                        "total_tokens": result["total_tokens"],
                        "estimated_cost": result["estimated_cost"],
                        "tools_used": result["tools_used"],
                        "function_calls": result["function_calls"]
                    })

                except Exception as e:
                    # Record this workflow's failure and continue with the rest.
                    benchmark_results.append({
                        "workflow_type": workflow_type,
                        "success": False,
                        "error": str(e)
                    })
        finally:
            # Always remove the temporary conversation, even if something
            # above raised — previously a failure here leaked the row.
            db.delete(conversation)
            db.commit()

        return {
            "benchmark_results": benchmark_results,
            "message": message,
            "model_id": model_id
        }

    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))


@router.get("/workflows/{workflow_type}/visualize")
async def visualize_workflow(workflow_type: str):
    """Render a workflow's structure as a Mermaid flow-chart.

    Returns:
        dict: the ``workflow_type``, the generated ``mermaid_diagram``
        source, and the raw ``config`` it was built from.

    Raises:
        HTTPException: 404 when the workflow type is unknown,
            500 on any other failure.
    """
    try:
        config = await langgraph_service.get_workflow_config(workflow_type)
        
        if not config:
            raise HTTPException(status_code=404, detail="工作流不存在")
        
        # Build a Mermaid top-down graph from the config's nodes and edges.
        mermaid_code = f"""graph TD
    Start --> ValidateInput
    """
        
        # One declaration line per node.
        for node in config.get("nodes", []):
            mermaid_code += f"    {node}[{node}]\n"
        
        # Only pass through entries already written in Mermaid edge syntax.
        for edge in config.get("edges", []):
            if "-->" in str(edge):
                mermaid_code += f"    {edge}\n"
        
        mermaid_code += "    Complete --> End"
        
        return {
            "workflow_type": workflow_type,
            "mermaid_diagram": mermaid_code,
            "config": config
        }
    
    except HTTPException:
        # Bug fix: the 404 above was previously swallowed by the generic
        # handler below and re-raised as a 500 — re-raise it unchanged.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))