#!/usr/bin/env python3
"""
API 网关服务，用于转换 vLLM 响应格式以兼容 OpenAI/Dify 规范
"""

import asyncio
import aiohttp
from aiohttp import web, ClientSession
import json
from typing import Dict, Any, List

# Backend service base URLs (no trailing slash)
BACKEND_SERVICES = {
    "embedding": "http://127.0.0.1:8001",
    "reranker": "http://127.0.0.1:8002"
}

async def fetch_model_info(session: ClientSession, service_name: str, service_url: str) -> List[Dict[str, Any]]:
    """Fetch the model list from one backend service.

    Queries ``{service_url}/v1/models`` and returns the entries under the
    response's "data" key. Any failure (connection error, timeout,
    non-200 status, malformed JSON) is logged and yields an empty list,
    so one unreachable backend does not break the aggregated endpoint.

    Args:
        session: shared aiohttp client session used for the request.
        service_name: human-readable name, used only in log messages.
        service_url: base URL of the backend service.

    Returns:
        List of model dicts, or [] on any error.
    """
    try:
        async with session.get(f"{service_url}/v1/models") as response:
            if response.status == 200:
                data = await response.json()
                return data.get("data", [])
            else:
                print(f"Failed to fetch models from {service_name}: {response.status}")
                return []
    # Catch only the expected failure modes instead of a blanket Exception,
    # so programming errors are not silently swallowed: transport problems
    # (ClientError covers ContentTypeError too), timeouts, and bad JSON.
    except (aiohttp.ClientError, asyncio.TimeoutError, json.JSONDecodeError) as e:
        print(f"Error fetching models from {service_name}: {e}")
        return []

def transform_model_response(vllm_response: Dict[str, Any]) -> Dict[str, Any]:
    """Normalize a vLLM model-list payload to the OpenAI-compatible schema.

    Keeps only the four fields defined by the OpenAI models spec
    (id, object, created, owned_by) and drops any vLLM-specific extras.

    Args:
        vllm_response: raw response dict, with model entries under "data".

    Returns:
        A dict of the form {"object": "list", "data": [...]}.
    """
    entries = vllm_response["data"] if "data" in vllm_response else []
    normalized = [
        {
            "id": entry.get("id"),
            "object": "model",
            "created": entry.get("created", 0),
            "owned_by": entry.get("owned_by", "unknown"),
        }
        for entry in entries
    ]
    return {"object": "list", "data": normalized}

async def models_handler(request: web.Request) -> web.Response:
    """Handle GET /v1/models by aggregating models from all backends.

    Queries every service in BACKEND_SERVICES concurrently and returns
    the combined list in OpenAI-compatible form. A backend that fails is
    simply omitted (fetch_model_info returns [] on error).

    Returns:
        JSON response: {"object": "list", "data": [...]}.
    """
    async with ClientSession() as session:
        # Query all backends concurrently instead of one after another,
        # so total latency is the slowest backend, not the sum of all.
        results = await asyncio.gather(
            *(
                fetch_model_info(session, name, url)
                for name, url in BACKEND_SERVICES.items()
            )
        )
        all_models: List[Dict[str, Any]] = []
        for models in results:
            all_models.extend(models)

        # Strip vLLM-specific fields so strict clients (e.g. Dify) accept it.
        transformed_response = transform_model_response(
            {"object": "list", "data": all_models}
        )
        return web.json_response(transformed_response)

async def embeddings_handler(request: web.Request) -> web.Response:
    """Handle POST /v1/embeddings by proxying the raw body to the embedding service.

    The backend's response body and status code are passed through
    unchanged. If the backend is unreachable, returns a 502 gateway
    error instead of letting the exception surface as an opaque 500.
    """
    # Read the request body before opening the upstream connection.
    body = await request.read()
    async with ClientSession() as session:
        try:
            async with session.post(
                f"{BACKEND_SERVICES['embedding']}/v1/embeddings",
                data=body,
                headers={"Content-Type": "application/json"},
            ) as response:
                response_data = await response.read()
                return web.Response(
                    body=response_data,
                    status=response.status,
                    headers={"Content-Type": "application/json"},
                )
        except aiohttp.ClientError as e:
            # Backend down or connection dropped: report a gateway error
            # rather than crashing the handler with an unhandled exception.
            return web.json_response(
                {"error": f"embedding backend unavailable: {e}"}, status=502
            )

async def rerank_handler(request: web.Request) -> web.Response:
    """Handle POST /v1/rerank by proxying the raw body to the reranker service.

    The backend's response body and status code are passed through
    unchanged. If the backend is unreachable, returns a 502 gateway
    error instead of letting the exception surface as an opaque 500.
    """
    # Read the request body before opening the upstream connection.
    body = await request.read()
    async with ClientSession() as session:
        try:
            async with session.post(
                f"{BACKEND_SERVICES['reranker']}/v1/rerank",
                data=body,
                headers={"Content-Type": "application/json"},
            ) as response:
                response_data = await response.read()
                return web.Response(
                    body=response_data,
                    status=response.status,
                    headers={"Content-Type": "application/json"},
                )
        except aiohttp.ClientError as e:
            # Backend down or connection dropped: report a gateway error
            # rather than crashing the handler with an unhandled exception.
            return web.json_response(
                {"error": f"reranker backend unavailable: {e}"}, status=502
            )

async def health_handler(request: web.Request) -> web.Response:
    """Liveness probe: always reports the gateway itself as healthy."""
    payload = {"status": "ok"}
    return web.json_response(payload)

def create_app() -> web.Application:
    """Build the aiohttp application and register all gateway routes."""
    app = web.Application()

    # Route table: (HTTP method, path, handler).
    routes = (
        ("GET", "/v1/models", models_handler),
        ("POST", "/v1/embeddings", embeddings_handler),
        ("POST", "/v1/rerank", rerank_handler),
        ("GET", "/health", health_handler),
    )
    for method, path, handler in routes:
        app.router.add_route(method, path, handler)

    return app

if __name__ == "__main__":
    app = create_app()
    web.run_app(app, host="127.0.0.1", port=8000)