#!/usr/bin/python3
# -*- coding:utf-8 -*-
"""
@File: middleware.py
@Author: lijk34925
@Date: 2025/3/28 14:25
@desc: Async FastAPI middleware that proxies Ollama-style requests to a
       large-model chat-completions API over a shared httpx connection pool.
"""

from fastapi import FastAPI, HTTPException, Request
from fastapi.responses import JSONResponse
from pydantic import BaseModel
import httpx
import asyncio

app = FastAPI()

# Upstream large-model API endpoint (OpenAI-style chat/completions path).
MODEL_API_URL = "http://10.20.200.121:8080/uis/chat/completions"
# SECURITY NOTE(review): bearer token is hard-coded in source; move it to an
# environment variable or secret store before deploying or committing.
AUTH_HEADER = "Bearer eyJhbGciOiJIUzI1NiJ9.eyJzdWIiOiJTRi16aWRvbmdodWEiLCJpYXQiOjE3MzkzMzAzMzR9.D8k8JnfOfinhF-PRNCdL0LewMYRGpTzgkwvDn_R24rI"


# Request model (follows the Ollama interface conventions).
class OllamaRequest(BaseModel):
    """Body schema for POST /ollama: prompt plus optional generation params."""
    model: str = "Qwen2-72B-Instruct"  # upstream model name
    prompt: str                        # user prompt text (required)
    max_tokens: int = 4096             # generation cap forwarded upstream
    user: str = "default_user"         # caller identifier forwarded upstream


# Global shared async HTTP client (connection-pool reuse across requests).
# Must stay open for the app's lifetime; it is closed once on shutdown.
async_client = httpx.AsyncClient(
    timeout=30.0,  # per-request timeout in seconds
    limits=httpx.Limits(max_connections=100, max_keepalive_connections=20)
)


@app.post('/ollama')
async def ollama_proxy(request_data: OllamaRequest):
    """
    Async proxy endpoint: forwards an Ollama-style request to the upstream
    chat-completions API and returns the first choice's message content.

    Request body (see OllamaRequest):
    - model: model name (default "Qwen2-72B-Instruct")
    - prompt: user prompt text
    - max_tokens: maximum generated tokens (default 4096)
    - user: caller identifier (default "default_user")

    Returns JSON {"response": <answer>} on success, or {"error": ...} with
    the upstream status code (HTTP errors), 502 (network errors), or 500.
    """
    try:
        # Translate the flat Ollama-style request into the chat/completions
        # message format expected by the upstream API.
        payload = {
            "model": request_data.model,
            "messages": [{
                "role": "user",
                "content": request_data.prompt
            }],
            "max_tokens": request_data.max_tokens,
            "user": request_data.user
        }

        # BUG FIX: do NOT wrap the shared client in `async with` here — that
        # closes the global connection pool when the first request completes,
        # so every later request fails with "the client has been closed".
        # Use the long-lived client directly; it is closed once on shutdown.
        response = await async_client.post(
            MODEL_API_URL,
            json=payload,
            headers={
                "Content-Type": "application/json",
                "Authorization": AUTH_HEADER
            }
        )
        response.raise_for_status()

        # Defensive extraction: tolerate missing keys in the upstream reply
        # and fall back to an empty answer instead of raising.
        result = response.json()
        answer = result.get("choices", [{}])[0].get("message", {}).get("content", "")

        return JSONResponse(content={"response": answer})

    except httpx.HTTPStatusError as e:
        # Propagate the upstream status code so callers can tell upstream
        # failures apart from proxy failures.
        return JSONResponse(
            status_code=e.response.status_code,
            content={"error": f"Upstream API error: {str(e)}"}
        )
    except httpx.RequestError as e:
        # Network-level failure (DNS, connect, timeout) → 502 Bad Gateway.
        return JSONResponse(
            status_code=502,
            content={"error": f"Upstream connection error: {str(e)}"}
        )
    except Exception as e:
        return JSONResponse(
            status_code=500,
            content={"error": f"Internal server error: {str(e)}"}
        )


@app.on_event("shutdown")
async def shutdown_event():
    """Close the shared HTTP connection pool when the application shuts down.

    NOTE(review): ``@app.on_event`` is deprecated in recent FastAPI releases;
    consider migrating to a lifespan context manager — verify the installed
    FastAPI version before changing.
    """
    await async_client.aclose()


if __name__ == '__main__':
    import uvicorn

    # Launch the ASGI server directly when run as a script.
    uvicorn.run(
        app,
        host='0.0.0.0',
        port=5000,
        loop="uvloop",  # use the faster uvloop event loop
        http="httptools",  # use the high-performance HTTP parser
        timeout_keep_alive=60  # keep-alive connection timeout (seconds)
    )