"""
JH子系统 AI聊天 API路由
提供基于Ollama的AI助手聊天功能
"""

from fastapi import APIRouter, HTTPException, Depends, BackgroundTasks
from fastapi.responses import StreamingResponse
from typing import Dict, List, Any, Optional
from pydantic import BaseModel
import json
import asyncio
import logging
from datetime import datetime

from ..dependencies import get_db
from ...ollama_service import OllamaService, JHChatService
from ....jh_subsystem.job_data_service import job_data_service

logger = logging.getLogger(__name__)

router = APIRouter(prefix="/jh/chat", tags=["JH-AI聊天"])

# Pydantic 模型定义
class ChatMessage(BaseModel):
    """Incoming chat message payload from the client."""
    message: str
    context_data: Optional[Dict[str, Any]] = None  # optional client-side context merged with DB data
    stream: bool = False  # request streaming generation (non-SSE endpoint)
    chart_id: Optional[str] = None  # chart ID; keeps per-chart conversation histories separate

class ChatResponse(BaseModel):
    """AI assistant reply returned by the /message endpoint."""
    response: str
    timestamp: str  # ISO-8601 timestamp of the reply
    token_usage: Optional[Dict[str, Any]] = None  # backend token accounting, if provided
    error: Optional[str] = None  # error description when generation partially failed

class ConversationSummary(BaseModel):
    """Aggregate statistics for a conversation history."""
    total_messages: int
    user_messages: int
    assistant_messages: int
    last_interaction: Optional[str] = None  # ISO timestamp of the most recent message, if any

# Global service instances, lazily initialized on first use by
# get_chat_service() / get_chat_status(); None until then.
ollama_service = None
jh_chat_service = None

async def get_chat_service() -> JHChatService:
    """Return the shared JHChatService, initializing it on first use.

    Lazily creates the global OllamaService, verifying that the Ollama
    server is reachable and that the configured model is installed before
    publishing it.

    Raises:
        HTTPException: 503 if the Ollama server is unreachable;
            404 if the configured qwen model is not installed.
    """
    global ollama_service, jh_chat_service

    if not ollama_service:
        # Build the service locally and publish it to the global only after
        # all health checks pass. The original assigned the global first, so
        # a failed first call left a half-initialized global and every later
        # call skipped these checks entirely.
        service = OllamaService(
            base_url="http://localhost:11434",
            model_name="qwen2.5:latest",
            timeout=60
        )

        # Verify the Ollama server is reachable
        if not await service.is_available():
            raise HTTPException(
                status_code=503,
                detail="Ollama服务不可用，请确保Ollama已启动并运行在localhost:11434"
            )

        # Verify the configured qwen model is installed
        if not await service.check_model_exists():
            available_models = await service.list_models()
            model_names = [m.get("name", "") for m in available_models]
            raise HTTPException(
                status_code=404,
                detail=f"Qwen模型不存在。可用模型: {', '.join(model_names)}"
            )

        ollama_service = service

    if not jh_chat_service:
        jh_chat_service = JHChatService(ollama_service)

    return jh_chat_service

@router.post("/message", response_model=ChatResponse)
async def send_message(
    chat_data: ChatMessage,
    chat_service: JHChatService = Depends(get_chat_service)
):
    """Send a chat message and return the AI assistant's reply."""
    try:
        # Merge client-supplied context with live job-market data
        context = await _enhance_context_with_db_data(chat_data.context_data)

        reply = await chat_service.chat(
            user_message=chat_data.message,
            context_data=context,
            stream=chat_data.stream,
            chart_id=chat_data.chart_id
        )
        return ChatResponse(**reply)
    except Exception as e:
        logger.error(f"聊天消息处理失败: {e}")
        raise HTTPException(status_code=500, detail=f"聊天服务错误: {str(e)}")

@router.post("/stream")
async def send_message_stream(
    chat_data: ChatMessage,
    chat_service: JHChatService = Depends(get_chat_service)
):
    """Stream the AI reply as Server-Sent Events (SSE).

    Each event line carries a JSON object: {"content": chunk, "done": false}
    while streaming, a final {"content": "", "done": true} on success, or
    {"error": ...} if generation fails mid-stream.
    """
    try:
        # Merge client-supplied context with live job-market data
        enhanced_context = await _enhance_context_with_db_data(chat_data.context_data)

        async def generate_response():
            try:
                # Scope the conversation history to the requested chart
                if chat_data.chart_id:
                    chat_service.set_current_chart(chat_data.chart_id)

                conversation_history = chat_service._get_chart_conversation_history(chat_service.current_chart_id)

                # Record the raw user message in the persisted history
                conversation_history.append({
                    "role": "user",
                    "content": chat_data.message
                })

                # Build the context-enhanced prompt variant
                enhanced_message = chat_service._enhance_message_with_context(
                    chat_data.message, enhanced_context
                )

                # Use only the 20 most recent turns as model input
                messages = conversation_history[-20:]
                if enhanced_message != chat_data.message:
                    # Replace the last entry with a copy instead of mutating it:
                    # the dicts in the slice are shared with conversation_history,
                    # so in-place mutation would overwrite the stored raw user
                    # message with the context-enhanced prompt.
                    messages[-1] = {**messages[-1], "content": enhanced_message}

                full_response = ""

                # Stream chunks from the model and relay them as SSE events
                async for chunk in chat_service.ollama.generate_stream(
                    messages=messages,
                    system_prompt=chat_service._get_system_prompt()
                ):
                    full_response += chunk
                    yield f"data: {json.dumps({'content': chunk, 'done': False}, ensure_ascii=False)}\n\n"

                # Persist the assistant's complete reply in history
                conversation_history.append({
                    "role": "assistant",
                    "content": full_response
                })

                # Signal completion to the client
                yield f"data: {json.dumps({'content': '', 'done': True}, ensure_ascii=False)}\n\n"

            except Exception as e:
                logger.error(f"流式响应生成失败: {e}")
                yield f"data: {json.dumps({'error': str(e)}, ensure_ascii=False)}\n\n"

        # text/event-stream is the media type the SSE spec requires; the
        # original used text/plain, which breaks EventSource-based clients.
        return StreamingResponse(
            generate_response(),
            media_type="text/event-stream",
            headers={
                "Cache-Control": "no-cache",
                "Connection": "keep-alive",
                "Access-Control-Allow-Origin": "*",
                "Access-Control-Allow-Headers": "*"
            }
        )

    except Exception as e:
        logger.error(f"流式聊天处理失败: {e}")
        raise HTTPException(status_code=500, detail=f"流式聊天服务错误: {str(e)}")

@router.get("/conversation/summary", response_model=ConversationSummary)
async def get_conversation_summary(
    chat_service: JHChatService = Depends(get_chat_service)
):
    """Return aggregate statistics for the current conversation."""
    try:
        return ConversationSummary(**chat_service.get_conversation_summary())
    except Exception as e:
        logger.error(f"获取对话摘要失败: {e}")
        raise HTTPException(status_code=500, detail=f"获取对话摘要失败: {str(e)}")

class SetChartRequest(BaseModel):
    """Request body for switching the active chart conversation."""
    chart_id: str

@router.post("/conversation/chart")
async def set_current_chart(
    request: SetChartRequest,
    chat_service: JHChatService = Depends(get_chat_service)
):
    """Switch the chat service's active conversation to the given chart."""
    try:
        chat_service.set_current_chart(request.chart_id)
        return {
            "message": f"已切换到图表 {request.chart_id}",
            "chart_id": request.chart_id,
            "timestamp": datetime.now().isoformat(),
        }
    except Exception as e:
        logger.error(f"设置当前图表失败: {e}")
        raise HTTPException(status_code=500, detail=f"设置当前图表失败: {str(e)}")

@router.delete("/conversation/clear")
async def clear_conversation(
    chart_id: Optional[str] = None,
    chat_service: JHChatService = Depends(get_chat_service)
):
    """Clear conversation history — for a specific chart, or the current one."""
    try:
        chat_service.clear_history(chart_id)
        if chart_id:
            message = f"图表 {chart_id} 的对话历史已清除"
        else:
            message = "当前图表的对话历史已清除"
        return {
            "message": message,
            "chart_id": chart_id,
            "timestamp": datetime.now().isoformat(),
        }
    except Exception as e:
        logger.error(f"清除对话历史失败: {e}")
        raise HTTPException(status_code=500, detail=f"清除对话历史失败: {str(e)}")

@router.get("/status")
async def get_chat_status():
    """Report availability of the Ollama backend and the configured model.

    Never raises: any failure is folded into the JSON payload so the
    frontend can always render a status panel.
    """
    try:
        global ollama_service

        if not ollama_service:
            # Use the same configuration as get_chat_service. The original
            # created a default OllamaService() here, so a /status call made
            # before the first chat request left the shared global pointing
            # at a differently-configured, unchecked service that
            # get_chat_service would then silently reuse.
            ollama_service = OllamaService(
                base_url="http://localhost:11434",
                model_name="qwen2.5:latest",
                timeout=60
            )

        is_available = await ollama_service.is_available()
        # Skip the follow-up queries when the server is down
        models = await ollama_service.list_models() if is_available else []
        model_exists = await ollama_service.check_model_exists() if is_available else False

        return {
            "ollama_available": is_available,
            "qwen_model_available": model_exists,
            "available_models": [m.get("name", "") for m in models],
            "current_model": ollama_service.model_name,
            "timestamp": datetime.now().isoformat()
        }

    except Exception as e:
        logger.error(f"获取聊天状态失败: {e}")
        return {
            "ollama_available": False,
            "qwen_model_available": False,
            "available_models": [],
            "current_model": None,
            "error": str(e),
            "timestamp": datetime.now().isoformat()
        }

@router.get("/suggestions")
async def get_chat_suggestions():
    """Return suggested chat topics, falling back to built-in defaults."""
    try:
        # Preferred source: the configurable suggestion list
        from config.ollama_config import get_suggested_questions
        return {
            "suggestions": get_suggested_questions(),
            "timestamp": datetime.now().isoformat(),
        }
    except Exception as e:
        logger.error(f"获取建议话题失败: {e}")
        # Config unavailable — serve a hard-coded baseline instead
        fallback_suggestions = [
            {
                "category": "简历优化",
                "questions": [
                    "如何优化我的简历以提高通过率？",
                    "简历中应该突出哪些技能？",
                    "如何写出吸引HR的工作经历描述？"
                ]
            },
            {
                "category": "面试准备",
                "questions": [
                    "面试前需要准备哪些问题？",
                    "如何回答技术面试问题？",
                    "面试时如何展示我的项目经验？"
                ]
            }
        ]
        return {"suggestions": fallback_suggestions, "timestamp": datetime.now().isoformat()}

async def _enhance_context_with_db_data(context_data: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
    """Return a copy of *context_data* enriched with live job-market data.

    Best-effort: any failure to reach the data subsystem is logged as a
    warning and the original context is returned unchanged. The input dict
    is never mutated. (Fixed the PEP 484 implicit-Optional annotation:
    the original declared `Dict[str, Any] = None`.)

    Args:
        context_data: Optional base context supplied by the client.

    Returns:
        A new dict with the original keys plus, when available,
        "market_summary" (a one-line Chinese summary string) and
        "skill_insights" (top skill combinations).
    """
    enhanced_context = context_data.copy() if context_data else {}

    try:
        # Imported lazily so a missing data subsystem degrades gracefully
        from src.jh_subsystem.job_data_service import job_data_service

        # Fold headline market statistics into a single summary sentence
        stats = await job_data_service.get_overview_statistics()
        if stats:
            market_info = []
            if "total_jobs" in stats:
                market_info.append(f"当前数据库中有{stats['total_jobs']}个职位")
            if "avg_salary" in stats:
                market_info.append(f"平均薪资为{stats['avg_salary']:,.0f}元/月")
            if "top_skills" in stats:
                top_skills = [skill['skill'] for skill in stats["top_skills"][:5]]
                market_info.append(f"热门技能包括：{', '.join(top_skills)}")

            if market_info:
                enhanced_context["market_summary"] = "；".join(market_info)

        # Attach the top three popular skill combinations, if provided
        insights = await job_data_service.get_market_insights()
        if insights and "popular_skill_combinations" in insights:
            enhanced_context["skill_insights"] = insights["popular_skill_combinations"][:3]

    except Exception as e:
        logger.warning(f"增强上下文数据失败: {e}")

    return enhanced_context

# Startup hook
@router.on_event("startup")
async def startup_event():
    """Probe the Ollama server at startup and log its availability."""
    # NOTE(review): router-level on_event is deprecated in newer FastAPI in
    # favor of lifespan handlers — worth migrating when the app is updated.
    try:
        service = OllamaService()
        if await service.is_available():
            models = await service.list_models()
            model_names = [m.get("name", "") for m in models]
            logger.info(f"Ollama服务已连接，可用模型: {', '.join(model_names)}")
        else:
            logger.warning("Ollama服务不可用，AI聊天功能将受限")
        # Close the throwaway HTTP client (assumes OllamaService exposes an
        # async client with aclose(), e.g. httpx — unchanged from original)
        await service.client.aclose()
    except Exception as e:
        logger.error(f"Ollama服务检查失败: {e}")