# /Users/xyl/Documents/ai_project/local_load_llm/main.py
import uvicorn
from fastapi import FastAPI
from fastapi.staticfiles import StaticFiles
from app.api.routes import router
from app.ui.interface import ChatInterface
from app.dependencies import get_llm_model
from app.config import settings
import gradio as gr
from contextlib import asynccontextmanager

@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifespan manager: eagerly load the LLM at startup.

    Overrides the model location/name on the module-level ``settings``
    object, then calls ``get_llm_model()`` so the first HTTP request does
    not pay the model-loading cost. Any startup failure is printed and
    re-raised so the server aborts instead of serving without a model.
    """
    try:
        # Point settings at the locally downloaded TinyLlama weights.
        # Uses the module-level `settings` import; the previous shadowing
        # function-local re-import was redundant and has been removed.
        settings.MODEL_CACHE_DIR = "/Users/xyl/Downloads/TinyLlama"
        settings.MODEL_NAME = "TinyLlama"

        # Load (and cache) the model before the app starts serving.
        get_llm_model()
        yield
    except Exception as e:
        # Surface the failure and abort startup.
        print(f"启动时出错: {e}")
        raise
    # No shutdown cleanup is needed; the previous empty `finally: pass`
    # block was a no-op and has been removed.

def create_app() -> FastAPI:
    """Build and return the FastAPI application with the Gradio UI mounted.

    REST API routes are served under ``/api``; the Gradio chat interface
    is mounted at the root path ``/``. The model itself is loaded lazily
    at startup via the ``lifespan`` hook.

    Returns:
        The fully configured :class:`FastAPI` application.
    """
    # Point settings at the locally downloaded TinyLlama weights.
    # Uses the module-level `settings` import; the previous shadowing
    # function-local re-import was redundant and has been removed.
    settings.MODEL_CACHE_DIR = "/Users/xyl/Downloads/TinyLlama"
    settings.MODEL_NAME = "TinyLlama"

    # Build the Gradio chat UI so it can be mounted onto the app below.
    chat_interface = ChatInterface()
    gradio_app = chat_interface.create_interface()

    app = FastAPI(
        title="TinyLlama Chat",
        description="A chat application powered by TinyLlama-1.1B",
        version="1.0.0",
        lifespan=lifespan,  # eager model load on startup
    )

    # Register API routes BEFORE mounting Gradio at "/" so the root mount
    # does not shadow the /api prefix.
    app.include_router(router, prefix="/api")

    # Mount the Gradio UI at the root; mount_gradio_app returns the app.
    app = gr.mount_gradio_app(app, gradio_app, path="/")

    return app

def run():
    """Run the application under uvicorn, bound to 127.0.0.1:settings.PORT.

    Any startup failure is printed and re-raised so the process exits
    with a non-zero status instead of failing silently.
    """
    try:
        # Point settings at the locally downloaded TinyLlama weights.
        # Uses the module-level `settings` import; the previous shadowing
        # function-local re-import was redundant and has been removed.
        settings.MODEL_CACHE_DIR = "/Users/xyl/Downloads/TinyLlama"
        settings.MODEL_NAME = "TinyLlama"

        app = create_app()

        uvicorn.run(
            app,
            host="127.0.0.1",  # bind to localhost only
            port=settings.PORT,
            log_level="debug",  # verbose logging to aid debugging
            timeout_keep_alive=65,
            loop="auto",
            # NOTE: uvicorn cannot use multiple workers when an app OBJECT
            # (rather than an import string) is passed, so this must stay 1.
            workers=1,
        )
    except Exception as e:
        # Report the failure and re-raise so callers / the shell see it.
        print(f"应用启动失败: {e}")
        raise

# Script entry point: start the server only when executed directly,
# not when this module is imported.
if __name__ == "__main__":
    run()