from fastapi import APIRouter, Request, HTTPException, File, UploadFile, Form
from fastapi.responses import StreamingResponse, JSONResponse

import os
import time
from typing import List, Literal, Optional, Union
from pydantic import BaseModel, Field
from openai import OpenAI
from sse_starlette.sse import EventSourceResponse

from configs import config

# Keep-alive ping interval for every SSE response served through sse_starlette.
# NOTE(review): sse_starlette measures this in seconds (default 15), so 1000
# effectively disables keep-alive pings — confirm the intended unit.
EventSourceResponse.DEFAULT_PING_INTERVAL = 1000

# Router exposing this module under /proxy/internlm/v1.
router = APIRouter(prefix="/proxy/internlm/v1", tags=["书生代理"])

# Upstream OpenAI-compatible endpoint of the InternLM "puyu" chat service.
INTERNLM_BASE_URL = "https://internlm-chat.intern-ai.org.cn/puyu/api/v1"
# API token resolution: environment variable takes precedence, the project
# config value is the fallback.
INTERNLM_API_TOKEN = os.getenv("INTERNLM_API_TOKEN", config.setting.INTERNLM_API_TOKEN)

# Synchronous OpenAI-compatible client bound to the InternLM endpoint.
# NOTE(review): a sync client used from async route handlers blocks the event
# loop for the duration of each upstream request — consider openai.AsyncOpenAI.
client = OpenAI(
    base_url=INTERNLM_BASE_URL,
    api_key=INTERNLM_API_TOKEN,
)


@router.post("/chat/completions")
async def chatCompletion(request: Request):
    """Proxy a chat-completion request to the upstream InternLM API.

    The JSON body is forwarded verbatim (model, messages, stream, plus any
    extra sampling parameters such as temperature / top_p / max_tokens).
    When ``stream`` is truthy the upstream chunks are relayed as SSE events;
    otherwise the full completion is returned as a single JSON response.

    Raises:
        HTTPException: 502 when the upstream call fails.
    """
    data = await request.json()

    # Normalize the stream flag; a missing key means a non-streaming request.
    stream = bool(data.get("stream"))

    # Forward the whole payload instead of cherry-picking model/messages/stream,
    # so callers' sampling parameters are not silently dropped.
    payload = {**data, "stream": stream}

    try:
        # NOTE(review): sync client call inside an async handler — blocks the
        # event loop until the upstream responds.
        response = client.chat.completions.create(**payload)
    except Exception as exc:
        # Surface upstream failures as an explicit gateway error rather than
        # an opaque 500 (or a crash mid-stream).
        raise HTTPException(status_code=502, detail=str(exc)) from exc

    if not stream:
        # Bug fix: the original iterated a non-streaming ChatCompletion object
        # (pydantic models iterate as (field, value) tuples), which raised
        # AttributeError on model_dump_json. Return the completion as JSON.
        return JSONResponse(response.model_dump(exclude_unset=True))

    async def stream_response():
        # Relay each upstream chunk as one SSE data event.
        for chunk in response:
            yield chunk.model_dump_json(exclude_unset=True)

    return EventSourceResponse(stream_response(), media_type="text/event-stream")
