import asyncio
import json
import os
import time
import uuid
from contextlib import asynccontextmanager

import requests
import yaml
from dotenv import load_dotenv
from fastapi import FastAPI, Request, Depends, HTTPException, BackgroundTasks, Query, status
from fastapi.responses import JSONResponse, StreamingResponse
from loguru import logger

from backend.middleware.trace_middleware import get_trace_id

# Load environment variables from the project-root .env file
load_dotenv(os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), '.env'))

# Import project-local modules (after load_dotenv so config can read env vars)
from backend.api.models.api_models import *
# Import the LLM manager
from backend.llm import llm_manager
from backend.models.model_loader import model_manager
from backend.rag.retriever import rag_manager
from backend.tools.tool_caller import tool_caller
from backend.common.config import config_manager
from backend.learning.trainer import learning_manager
from backend.database.db import db_manager
from backend.common.vector_db_manager import VectorDBManager
# Import the router registration function and the Excel tools router
from backend.api import register_routers
from backend.api.routers import excel_tools
from backend.middleware.cors_helper import configure_cors

# 初始化向量数据库管理器
vector_db_manager = VectorDBManager()

# 创建lifespan函数来管理应用的生命周期
@asynccontextmanager
async def lifespan(app: FastAPI):
    """应用生命周期管理，用于资源初始化和清理"""
    try:
        # 初始化关系数据库（确保数据库连接和表创建）
        try:
            # 确保数据库连接
            if hasattr(db_manager, 'connect'):
                db_manager.connect()
            # 创建数据库表结构
            db_manager.create_tables()
            logger.info("关系数据库初始化成功，表结构创建完成")
        except Exception as e:
            logger.error(f"关系数据库初始化失败: {str(e)}")
            raise
        
        # 初始化向量数据库表（含降级处理）
        try:
            vector_db_manager.create_tables()
            logger.info("向量数据库表初始化完成")
        except Exception as e:
            logger.warning(f"向量数据库表初始化失败，将使用降级模式: {str(e)}")
            # 在开发模式下，允许跳过向量库初始化
            if config_manager.startup.skip_rag:
                logger.info("RAG功能已禁用，跳过向量库初始化")
            else:
                logger.error("向量库初始化失败且RAG功能未禁用，这可能导致RAG功能不可用")
        
        yield
        
    finally:
        # 清理资源
        pass


# 从配置管理器获取API配置
api_config = config_manager.api

# 从环境变量或配置中获取OpenAI相关设置
OPENAI_BASE = os.getenv('OPENAI_BASE', 'http://127.0.0.1:11434/v1')
OPENAI_API_KEY = os.getenv('OPENAI_API_KEY', 'ollama')

# 创建FastAPI应用
app = FastAPI(
    title="AI助手API",
    description="基于HuggingFace Transformers、FastAPI、PostgreSQL和pgvector的RAG增强AI助手",
    version="1.0.0",
    docs_url="/api/docs",
    redoc_url="/api/redoc",
    lifespan=lifespan
)

# 健康检查端点
@app.get("/healthz")
def healthz():
    # 检查ollama服务状态
    ollama_base_url = os.getenv("OLLAMA_BASE_URL", "http://127.0.0.1:11434")
    models_endpoint = f"{ollama_base_url}/api/tags"
    ollama_status = {"connected": False, "model_count": 0}
    
    try:
        response = requests.get(models_endpoint, timeout=2)
        if response.status_code == 200:
            data = response.json()
            models = data.get("models", [])
            ollama_status = {
                "connected": True,
                "model_count": len(models)
            }
            logger.info(f"成功连接到Ollama服务，发现{len(models)}个模型")
    except Exception as e:
        logger.error(f"连接Ollama服务失败: {str(e)}")
    
    # 检查数据库连接
    db_connected = False
    try:
        with db_manager.get_cursor() as cursor:
            if db_manager.db_type == "sqlite":
                cursor.execute("SELECT 1")
            else:
                cursor.execute("SELECT 1")
            cursor.fetchone()
        db_connected = True
    except Exception as e:
        logger.error(f"数据库连接失败: {str(e)}")
    
    return {
        "status": "healthy" if db_connected else "unhealthy",
        "database": {
            "connected": db_connected
        },
        "ollama": ollama_status,
        "environment": {
            "ollama_base_url": ollama_base_url,
            "app_version": "1.0.0"
        }
    }

# 获取Ollama主机配置
OLLAMA_HOST = os.getenv("OLLAMA_BASE_URL", "http://127.0.0.1:11434")

# Ollama模型列表转发接口
@app.get("/v1/llm/ollama/models")
async def get_ollama_models(request: Request, trace_id: str = Depends(get_trace_id)):
    """
    获取Ollama可用模型列表
    转发OLLAMA_HOST/api/tags接口的响应
    """
    logger.info("获取Ollama模型列表", extra={"trace_id": trace_id})
    
    try:
        # 调用Ollama API获取模型列表
        response = requests.get(f"{OLLAMA_HOST}/api/tags", timeout=10)
        response.raise_for_status()
        
        models_data = response.json()
        
        # 转换响应格式为前端友好的格式
        models = []
        if 'models' in models_data:
            models = [
                {
                    "name": model['name'],
                    "model": model['model'],
                    "modified_at": model.get('modified_at', ''),
                    "size": model.get('size', 0),
                    "details": model.get('details', {})
                }
                for model in models_data['models']
            ]
        
        return {
            "models": models,
            "total": len(models),
            "trace_id": trace_id
        }
    except requests.RequestException as e:
        logger.error(f"获取Ollama模型失败: {str(e)}", extra={"trace_id": trace_id})
        raise HTTPException(status_code=503, detail=f"Ollama服务不可用: {str(e)}")
    except Exception as e:
        logger.error(f"获取Ollama模型时发生错误: {str(e)}", extra={"trace_id": trace_id})
        raise HTTPException(status_code=500, detail=f"服务器内部错误: {str(e)}")

# 注册所有API路由
register_routers(app)
# 注册Excel工具路由
app.include_router(excel_tools.router, prefix="/v1/tools/excel", tags=["excel-tools"])
configure_cors(app)

# 全局异常处理
@app.exception_handler(Exception)
async def global_exception_handler(request: Request, exc: Exception):
    logger.error(f"请求处理异常: {str(exc)}", exc_info=True)
    return JSONResponse(
        status_code=500,
        content={"detail": str(exc), "error_type": type(exc).__name__}
    )

@app.get("/")
def root():
    """根路径，返回API信息"""
    return {
        "name": "AI助手API",
        "version": "1.0.0",
        "description": "基于HuggingFace Transformers、FastAPI、PostgreSQL和pgvector的RAG增强AI助手",
        "docs": "/api/docs"
    }

# 启动应用的入口点
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(
        "backend.api.main:app",
        host=api_config['host'],
        port=api_config['port'],
        workers=api_config['workers'],
        reload=api_config['debug']
    )

