import os
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from contextlib import asynccontextmanager
from anyio import to_thread
from .prelude import version, port, thread_size
from .middleware import ErrorLogging
from .router import api_router
from .router.mcp import mcp_app

@asynccontextmanager
async def thread_pool_lifespan(app: FastAPI):
    """Resize anyio's default worker-thread pool for the app's lifetime.

    Sets the default thread limiter's capacity to ``thread_size`` before the
    application starts serving; no teardown action is needed on shutdown.
    """
    limiter = to_thread.current_default_thread_limiter()
    limiter.total_tokens = thread_size
    yield

# Combine both lifespans
# Combine both lifespans
@asynccontextmanager
async def combined_lifespan(app: FastAPI):
    """Enter the thread-pool lifespan and the MCP app's lifespan together.

    The MCP lifespan MUST be executed, otherwise the mounted MCP
    sub-application is never initialized.
    """
    async with thread_pool_lifespan(app), mcp_app.lifespan(app):
        yield

# Create the FastAPI application instance.
app = FastAPI(
    title="OpenAI-API",
    description="OpenAI api server with MCP support",
    version=version,
    docs_url="/docs",
    redoc_url="/redoc",
    lifespan=combined_lifespan,
)

# Error-logging middleware (added first, so it sits innermost in the
# middleware stack and runs last on the way in).
app.add_middleware(ErrorLogging)

# CORS middleware.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # should be restricted to specific domains in production
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Register API routes under the /api prefix.
app.include_router(api_router, prefix="/api")


# Root health-check endpoint.
#
# NOTE: this route must be registered BEFORE the MCP mount below. Starlette
# matches routes in registration order, and a Mount at "/" matches every
# path — mounting first would shadow this endpoint and make it unreachable.
@app.get("/", response_model=dict)
async def root():
    """Return basic service metadata for health checks."""
    return {
        "message": "OpenAI api server with MCP support",
        "version": version,
        "status": "running",
        "device": os.environ.get("CUDA_VISIBLE_DEVICES", "not set"),
    }


# Mount the MCP sub-application at the root path (acts as a catch-all
# fallback for any path not matched by the routes registered above).
app.mount("/", mcp_app, name="mcp")

__all__ = ["app", "version", "port"]