import hmac
import json
import os

from fastapi import Depends, Header, HTTPException, Request
from fastapi.responses import StreamingResponse

from demo.main import ChatRequest, app, client


#添加流式响应
@app.post("/chat/stream")
async def chat_completion_stream(request: ChatRequest):
    try:
        def generate():
            stream = client.chat.completions.create(
                model=request.model,
                messages=[{"role": "user", "content": request.message}],
                max_tokens=request.max_tokens,
                temperature=request.temperature,
                stream=True
            )

            for chunk in stream:
                if chunk.choices[0].delta.content is not None:
                    yield f"data: {json.dumps({'content': chunk.choices[0].delta.content})}\n\n"

        return StreamingResponse(generate(), media_type="text/plain")

    except Exception as e:
        raise HTTPException(status_code=500, detail=f"流式请求失败: {str(e)}")

#添加认证中间件
async def verify_token(x_api_key: str = Header(...)):
    if x_api_key != os.getenv("YOUR_API_SECRET"):
        raise HTTPException(status_code=403, detail="无效的 API Key")

@app.post("/chat", dependencies=[Depends(verify_token)])
async def protected_chat_completion(request: ChatRequest):
    # 原有的聊天逻辑
    pass

from slowapi import Limiter, _rate_limit_exceeded_handler
from slowapi.util import get_remote_address
from slowapi.errors import RateLimitExceeded

limiter = Limiter(key_func=get_remote_address)
app.state.limiter = limiter
app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)

#添加速率限制
@app.post("/chat")
@limiter.limit("10/minute")
async def chat_completion(request: ChatRequest):
    # 原有的聊天逻辑
    pass