import asyncio
import sys
from contextlib import asynccontextmanager
from typing import List, Dict, Any, Optional

import nest_asyncio
import uvicorn
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from fastapi.openapi.utils import get_openapi
from llama_index.core import Settings
from loguru import logger
from pydantic import BaseModel

from ai_platform.api import chat, upload, knowledge, auth, wiki
from ai_platform.config.resource import get_kafka_service, init_resource, get_llm, get_embedding
from ai_platform.config.settings import settings
from ai_platform.models.session import session_manager
from ai_platform.models.user import user_manager
from ai_platform.services.knowledge_service import knowledge_service

# Patch the running event loop so nested loop re-entry is allowed
# (needed when llama-index / uvicorn code awaits inside an already-running loop).
nest_asyncio.apply()

# Configure logging: drop loguru's default handler and install a formatted
# stderr sink at the level taken from application settings.
logger.remove()
logger.add(
    sys.stderr,
    format="<green>{time:YYYY-MM-DD HH:mm:ss}</green> | <level>{level: <8}</level> | <cyan>{name}</cyan>:<cyan>{function}</cyan>:<cyan>{line}</cyan> - <level>{message}</level>",
    level=settings.log_level
)

# NOTE(review): these run as import-time side effects — any failure here makes
# the whole module unimportable (including for tooling that just inspects `app`).
# Consider moving them into the lifespan startup if that is acceptable.
init_resource()
Settings.llm = get_llm()
Settings.embed_model = get_embedding()


async def startup_upload_service(app: FastAPI):
    """Start the Kafka-backed file-upload service.

    Starts the producer and consumer, then launches the message-consuming
    loop as a background task. The service and the task are stored on
    ``app.state`` so shutdown code can reach them.

    Failures are logged (with traceback) but not raised, so the rest of the
    application can still come up.
    """
    try:
        kafka_service = get_kafka_service()
        app.state.kafka_service = kafka_service
        await kafka_service.start_producer()
        await kafka_service.start_consumer()
        # Keep a strong reference to the task: a bare create_task() result may
        # be garbage-collected before the loop finishes, silently killing the
        # consumer (documented asyncio pitfall).
        app.state.kafka_consumer_task = asyncio.create_task(
            kafka_service.consume_messages()
        )
        logger.info("文件上传服务启动成功")
    except Exception as e:
        logger.exception(f"文件上传服务启动失败: {e}")


async def startup_session_service():
    """Initialize the session-management tables.

    Errors are logged with a traceback instead of being propagated, so a
    failing subsystem does not block application startup.
    """
    try:
        await session_manager.init_tables()
    except Exception as e:
        logger.exception(f"会话管理服务启动失败: {e}")
    else:
        logger.info("会话管理服务启动成功")


async def startup_user_service():
    """Initialize the user-management tables.

    Errors are logged with a traceback instead of being propagated, so a
    failing subsystem does not block application startup.
    """
    try:
        await user_manager.init_tables()
    except Exception as e:
        logger.exception(f"用户管理服务启动失败: {e}")
    else:
        logger.info("用户管理服务启动成功")


async def startup_knowledge_service():
    """Initialize the knowledge-base tables.

    Errors are logged with a traceback instead of being propagated, so a
    failing subsystem does not block application startup.
    """
    try:
        await knowledge_service.init_tables()
    except Exception as e:
        logger.exception(f"知识库服务启动失败: {e}")
    else:
        logger.info("知识库服务启动成功")


@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan: bring up backing services, tear them down on exit.

    Bug fixed: the parameter was named ``_`` while the shutdown call used the
    module-level ``app`` global, and ``shutdown_upload_service`` was invoked
    unconditionally even though the upload startup is currently disabled —
    so ``app.state.kafka_service`` never existed and every shutdown logged an
    AttributeError. We now use the app instance FastAPI passes in and only
    stop the Kafka service if it was actually started.
    """
    # Upload (Kafka) service is intentionally disabled for now.
    # await startup_upload_service(app)
    await startup_session_service()
    await startup_user_service()
    await startup_knowledge_service()
    logger.info("所有服务初始化完成")

    yield

    # Only tear down the Kafka service if startup actually registered it.
    if getattr(app.state, "kafka_service", None) is not None:
        await shutdown_upload_service(app)


async def shutdown_upload_service(app: FastAPI):
    """Stop the Kafka-backed upload service (producer then consumer).

    Errors are logged with a traceback rather than raised, so shutdown of the
    rest of the application can proceed.
    """
    try:
        service = app.state.kafka_service
        await service.stop_producer()
        await service.stop_consumer()
    except Exception as e:
        logger.exception(f"文件上传服务关闭失败: {e}")
    else:
        logger.info("文件上传服务关闭成功")


# OpenAPI tag metadata shown in the interactive docs, one entry per router group.
_OPENAPI_TAGS = [
    {"name": "Authentication", "description": "用户认证相关接口"},
    {"name": "chat", "description": "智能对话系统"},
    {"name": "文件管理", "description": "文件存储管理"},
    {"name": "知识库管理", "description": "知识库系统"},
]

# Build the FastAPI application instance.
app = FastAPI(
    title="LlamaIndex 智能数据分析平台",
    description="基于LlamaIndex的企业级智能数据分析平台API",
    version="1.0.0",
    lifespan=lifespan,
    openapi_url="/api/v1/openapi.json",
    docs_url="/docs",
    redoc_url="/redoc",
    openapi_tags=_OPENAPI_TAGS,
)


# Custom OpenAPI schema generator (replaces FastAPI's default `app.openapi`).
def custom_openapi():
    """Build (once) and return the customized OpenAPI schema.

    The schema is generated lazily on first call and cached on
    ``app.openapi_schema``; subsequent calls return the cached object.
    Adds contact/license info, server list, rich tag descriptions, and an
    ``x-logo`` extension on top of the auto-generated route schema.
    """
    # Return the cached schema if it was already built.
    if app.openapi_schema:
        return app.openapi_schema
    
    openapi_schema = get_openapi(
        title="LlamaIndex 智能数据分析平台 API",
        version="1.0.0",
        description="""
## 🚀 LlamaIndex 智能数据分析平台

这是一个基于 LlamaIndex 的企业级智能数据分析平台，提供以下核心功能：

### 🔐 用户认证系统
- 用户注册、登录、登出
- 基于会话的身份验证
- 权限管理和访问控制

### 💬 智能对话系统
- 基于知识库的智能问答
- 多轮对话支持
- 会话管理和历史记录

### 📁 文件管理系统
- 文件上传、下载、预览
- 支持多种文件格式
- 批量操作和存储管理

### 🧠 知识库管理
- 知识库创建和管理
- 文档向量化和索引
- 智能检索和搜索

### 🔧 技术特性
- **高性能**: 基于 FastAPI 异步框架
- **可扩展**: 支持分布式部署
- **安全**: 完整的身份验证和授权
- **智能**: 集成先进的 AI 技术

### 📚 文档和示例
- 完整的 API 文档
- 丰富的请求示例
- 详细的错误码说明
        """,
        routes=app.routes,
        contact={
            "name": "LlamaIndex 平台团队",
            "email": "support@llamaindex-platform.com",
            "url": "https://llamaindex-platform.com"
        },
        license_info={
            "name": "MIT License",
            "url": "https://opensource.org/licenses/MIT"
        },
        servers=[
            {
                "url": "http://localhost:8000",
                "description": "开发环境"
            },
            {
                "url": "https://api.llamaindex-platform.com",
                "description": "生产环境"
            }
        ],
        # Tag names must match the `tags=` used by the routers for grouping to work.
        tags=[
            {
                "name": "Authentication",
                "description": "**用户认证相关接口** - 用户注册、登录、登出、权限验证等功能"
            },
            {
                "name": "chat",
                "description": "**智能对话系统** - 基于知识库的AI对话、会话管理、历史记录等功能"
            },
            {
                "name": "文件管理",
                "description": "**文件存储管理** - 文件上传、下载、预览、批量操作等功能"
            },
            {
                "name": "知识库管理",
                "description": "**知识库系统** - 知识库创建、文档管理、智能检索等功能"
            }
        ]
    )
    
    # Vendor extension: logo displayed by ReDoc-style renderers.
    openapi_schema["info"]["x-logo"] = {
        "url": "https://llamaindex-platform.com/logo.png"
    }
    
    # Cache so the schema is only built once per process.
    app.openapi_schema = openapi_schema
    return app.openapi_schema


# Install the custom OpenAPI schema generator defined above.
app.openapi = custom_openapi

# Configure CORS.
# NOTE(review): allow_origins=["*"] combined with allow_credentials=True is
# overly permissive for production (browsers reject the credentialed wildcard,
# so the middleware effectively echoes any Origin) — confirm this is intended.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Register the API routers.
app.include_router(auth.router)  # authentication API
app.include_router(chat.router)
app.include_router(upload.router)
app.include_router(knowledge.router)
app.include_router(wiki.router)

# Pydantic request models.
# NOTE(review): none of these models are referenced by routes in this file —
# verify they are used elsewhere, otherwise they are dead code.
class QueryRequest(BaseModel):
    """Request body for a knowledge-base query."""
    question: str
    top_k: Optional[int] = None  # result count; None presumably means "service default" — confirm
    use_reranker: bool = True
    use_refiner: bool = True


class ProcessDocumentsRequest(BaseModel):
    """Request body for the document-ingestion pipeline.

    The boolean flags toggle individual pipeline stages (cleaning, splitting,
    metadata extraction, embedding); all are enabled by default.
    """
    source: str
    source_type: str = "file"  # presumably "file" vs. other source kinds — confirm against consumer
    clean: bool = True
    split: bool = True
    extract_meta: bool = True
    embed: bool = True


class EvaluationRequest(BaseModel):
    """Request body for running an evaluation over a set of test queries."""
    test_queries: List[Dict[str, Any]]  # schema of each dict is defined by the consumer — confirm
    metrics: Optional[List[str]] = None  # None presumably means "all metrics" — confirm


@app.get("/")
async def root():
    """Root/health endpoint returning basic service metadata.

    Bug fixed: reported version was "0.1.0" while the app (and the custom
    OpenAPI schema) declare "1.0.0" — now consistent.
    """
    return {
        "message": "LlamaIndex 数据分析平台",
        "version": "1.0.0",
        "status": "running"
    }


def main():
    """CLI entry point: serve the ASGI app with uvicorn.

    Debug/reload and log level come from application settings; uvicorn's
    default host (127.0.0.1) is used.
    """
    server_options = {
        "port": 8000,
        "reload": settings.debug,
        "log_level": settings.log_level.lower(),
    }
    uvicorn.run("ai_platform.main:app", **server_options)


# Script entry guard: only start the server when executed directly.
if __name__ == "__main__":
    main()
