import json
import logging
import time
from typing import Optional

from fastapi import APIRouter, Depends
from fastapi.responses import StreamingResponse
from langchain_core.messages import HumanMessage, AIMessage, SystemMessage
from sqlalchemy.orm import Session

from app.core.database import get_db, get_sync_db
from app.schemas.chat import ChatBody
from app.services.chat_service_simple import ChatServiceSimple

router = APIRouter()
@router.post("/conversion")
async def conversion(chat_body: ChatBody):
    serv = ChatServiceSimple(temperature=1.0)
    result = serv.chain_chat(chat_body.message)
    return {"message": result}


#生成一个支持AI流式输出的接口
@router.get("/stream")
async def stream(msg: str):
    serv = ChatServiceSimple(temperature=1.0)
    
    async def generate_stream():
        async for chunk in serv.stream_chat_with_role(msg):
            yield f"data: {chunk.content}\n\n"
    
    return StreamingResponse(
        generate_stream(),
        media_type="text/event-stream",
        headers={
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
            "Access-Control-Allow-Origin": "*",
            "Access-Control-Allow-Headers": "*"
        }
    )

# POST方式的流式接口，更符合前端需求
@router.post("/stream")
async def chat_stream(chat_body: ChatBody):
    serv = ChatServiceSimple(temperature=1.0)
    history_messages = []
    if chat_body.history:
        for msg in chat_body.history:
            if msg.role == "user":
                history_messages.append(HumanMessage(content=msg.content))
            elif msg.role == "assistant":
                history_messages.append(AIMessage(content=msg.content))
            elif msg.role == "system":
                history_messages.append(SystemMessage(content=msg.content))

    async def generate_stream():
        async for chunk in serv.chain_chat_with_stream(chat_body.message, history=history_messages):
            yield f"data: {chunk}\n\n"
    
    return StreamingResponse(
        generate_stream(),
        media_type="text/event-stream",
        headers={
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
            "Access-Control-Allow-Origin": "*",
            "Access-Control-Allow-Headers": "*"
        }
    )

@router.get("/batch")
async def batch(msg: str):
    serv = ChatServiceSimple(temperature=1.0)
    result = serv.chat_with_batch(msg)
    return {"message": result[0].content, "history": result[1].content}



@router.post("/long_conversion")
async def long_conversion(chat_body: ChatBody):
    serv = ChatServiceSimple(temperature=1.0)
    result = serv.chain_with_parallel(chat_body.message)
    return {"message": result}

@router.post("/retrieval")
async def retrieval(chat_body: ChatBody):
    serv = ChatServiceSimple(temperature=1.0)
    result = serv.chain_with_retrieval(chat_body.message)
    return {"message": result}


@router.post("/callback")
async def callback(chat_body: ChatBody):
    serv = ChatServiceSimple(temperature=1.0)
    result = serv.chain_with_callback(chat_body.message)
    return {"message": result}

@router.post("/memory")
async def memory(chat_body: ChatBody):
    serv = ChatServiceSimple(temperature=1.0)
    result = serv.chain_chat_with_chat_history(chat_body.message)
    return {"message": result}


@router.post("/")
async def chat(chat_body: ChatBody):
    """
    非流式聊天接口
    
    Args:
        chat_body: 聊天请求体，包含用户消息和可选的对话ID
    
    Returns:
        dict: 包含回复内容、对话ID和消息ID的响应
    """
    serv = ChatServiceSimple(temperature=1.0)
    result = serv.chain_chat(chat_body.message)
    return {
        "content": result,
        "conversation_id": chat_body.conversation_id or "unknown",
        "message_id": str(int(time.time() * 1000))
    }


@router.post("/stream-with-memory")
async def chat_stream_with_memory(
    chat_body: ChatBody,
    db: Session = Depends(get_sync_db)
):
    """
    使用MySQL数据库和摘要记忆功能的流式聊天接口
    
    Args:
        chat_body: 聊天请求体，包含用户消息、conversation_id和user_id
        db: 数据库会话
    
    Returns:
        StreamingResponse: 流式响应
    """
    serv = ChatServiceSimple(temperature=1.0)
    
    async def generate_stream():
        async for chunk in serv.chain_chat_with_memory_stream(
            query=chat_body.message,
            conversation_id=chat_body.conversation_id,
            user_id=chat_body.user_id,
            db=db
        ):
            yield f"data: {chunk}\n\n"
    
    return StreamingResponse(
        generate_stream(),
        media_type="text/event-stream",
        headers={
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
            "Access-Control-Allow-Origin": "*",
            "Access-Control-Allow-Headers": "*"
        }
    )


@router.get("/conversation/{conversation_id}/history")
async def get_conversation_history(
    conversation_id: int,
    limit: Optional[int] = 50,
    db: Session = Depends(get_sync_db)
):
    """
    获取对话历史
    
    Args:
        conversation_id: 对话ID
        limit: 返回消息数量限制
        db: 数据库会话
    
    Returns:
        dict: 包含消息列表的响应
    """
    serv = ChatServiceSimple(temperature=1.0)
    messages = await serv.get_conversation_history(conversation_id, limit, db)
    return {"conversation_id": conversation_id, "messages": messages}


@router.get("/conversation/{conversation_id}/summaries")
async def get_conversation_summaries(
    conversation_id: int,
    db: Session = Depends(get_sync_db)
):
    """
    获取对话摘要
    
    Args:
        conversation_id: 对话ID
        db: 数据库会话
    
    Returns:
        dict: 包含摘要列表的响应
    """
    serv = ChatServiceSimple(temperature=1.0)
    summaries = await serv.get_conversation_summaries(conversation_id, db)
    return {"conversation_id": conversation_id, "summaries": summaries}


@router.post("/agent")
async def agent_chat(
    chat_body: ChatBody,
    db: Session = Depends(get_sync_db)
):
    """
    智能体聊天接口 - 具备工具调用和网络访问能力
    
    Args:
        chat_body: 聊天请求体，包含消息、对话ID、用户ID等
        db: 数据库会话
        
    Returns:
        dict: 包含智能体回复的响应
    """
    try:
        serv = ChatServiceSimple(temperature=0.7)
        
        # 从请求体中获取参数
        query = chat_body.message
        conversation_id = getattr(chat_body, 'conversation_id', 1)
        user_id = getattr(chat_body, 'user_id', 1)
        
        # 调用智能体方法
        response = await serv.agent_chat(
            query=query,
            conversation_id=conversation_id,
            user_id=user_id
        )
        
        return {
            "message": response,
            "type": "agent",
            "timestamp": time.time(),
            "conversation_id": conversation_id,
            "user_id": user_id
        }
        
    except Exception as e:
        return {
            "error": f"智能体处理失败: {str(e)}",
            "message": "抱歉，智能体暂时无法处理您的请求，请稍后再试。",
            "type": "error"
        }

@router.post("/agent/stream")
async def agent_chat_stream(chat_body: ChatBody):
    """
    智能体流式聊天接口
    """
    async def generate():
        try:
            chat_service = ChatServiceSimple()
            # 确保user_id有默认值
            user_id = chat_body.user_id or 1
            async for chunk in chat_service.agent_chat_stream(
                query=chat_body.message,
                conversation_id=chat_body.conversation_id,
                user_id=user_id
            ):
                # 使用 Server-Sent Events 格式
                yield f"data: {chunk}\n\n"
        except Exception as e:
            error_data = json.dumps({'error': str(e)}, ensure_ascii=False)
            yield f"data: {error_data}\n\n"
            yield "data: [DONE]\n\n"
        else:
            yield "data: [DONE]\n\n"
    
    return StreamingResponse(
        generate(),
        media_type="text/event-stream",
        headers={
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
            "Access-Control-Allow-Origin": "*",
            "Access-Control-Allow-Headers": "*",
        }
    )

@router.get("/conversations/user/{user_id}")
async def get_user_conversations(
    user_id: int,
    limit: Optional[int] = 50,
    db: Session = Depends(get_sync_db)
):
    """
    获取用户的对话列表
    
    Args:
        user_id: 用户ID
        limit: 返回对话数量限制
        db: 数据库会话
    
    Returns:
        dict: 包含对话列表的响应
    """
    from app.models.chat import Conversation, Message
    from sqlalchemy import desc
    
    try:
        conversations = db.query(Conversation).filter(
            Conversation.user_id == user_id,
            Conversation.is_active == True
        ).order_by(desc(Conversation.last_activity_at)).limit(limit).all()
        
        conversation_list = []
        for conv in conversations:
            # 获取最后一条消息作为预览
            last_message = ""
            last_msg = db.query(Message).filter(
                Message.conversation_id == conv.id
            ).order_by(desc(Message.sequence_number)).first()
            
            if last_msg:
                last_message = last_msg.content[:50] + ("..." if len(last_msg.content) > 50 else "")
            
            conversation_list.append({
                "id": conv.id,
                "title": conv.title,
                "lastMessage": last_message,
                "timestamp": conv.last_activity_at.isoformat(),
                "messageCount": conv.message_count,
                "chatMode": conv.chat_mode.value if conv.chat_mode else "NORMAL"
            })
        
        return {"conversations": conversation_list}
    except Exception as e:
        print(f"Error getting user conversations: {e}")
        return {"conversations": []}


