import asyncio
import json
import os

import requests
import uvicorn
from fastapi import FastAPI
from fastapi.responses import StreamingResponse
from langchain.chat_models import init_chat_model
from pydantic import BaseModel

# --- Model / client configuration ------------------------------------------
# The BaoCloud gateway speaks the OpenAI wire protocol, so we reuse the
# "openai" provider and just point it at the BaoCloud base URL.
_api_key = os.getenv("BAOCLOUD_DS_API_KEY")
if _api_key is None:
    # Fail fast with an actionable message. Without this guard,
    # os.environ[...] = None raises an opaque "TypeError: str expected".
    raise RuntimeError("Environment variable BAOCLOUD_DS_API_KEY is not set")
os.environ["OPENAI_API_KEY"] = _api_key

# NOTE(review): if BAOCLOUD_DS_BASE_URL is unset this falls back to the
# provider's default endpoint — confirm that is intended.
base_url = os.getenv("BAOCLOUD_DS_BASE_URL")
model = init_chat_model(model="DeepSeek-V3", model_provider="openai", base_url=base_url)

app = FastAPI()


class MessageRequestBody(BaseModel):
    """Request body for POST /test_post: a chat message plus its sender."""

    # Chat message text that will be forwarded to the downstream service.
    message: str
    # Sender identifier; exact semantics defined by the downstream service.
    username: str


@app.get("/")
def read_root():
    """Root endpoint; returns a fixed greeting (useful as a liveness check)."""
    return dict(Hello="World")


async def call_model(message):
    """Async generator that streams the model's reply to *message*.

    Thin adapter over ``model.astream`` yielding each chunk's text so the
    endpoint can hand it straight to a StreamingResponse.
    """
    # NOTE(review): assumes every streamed chunk exposes .content — confirm
    # for the configured provider.
    async for part in model.astream(message):
        yield part.content


@app.get("/chat")
async def chat(message: str):
    """Stream the LLM's answer for ``message`` back to the client."""
    token_stream = call_model(message)
    return StreamingResponse(token_stream, media_type="text/html")


@app.post("/test_post")
async def test_post(body: MessageRequestBody):
    """Forward the message/username payload to the downstream test service.

    Logs the downstream ``code``/``message``/``data`` triple and returns the
    downstream JSON body unchanged.

    Raises:
        requests.HTTPError: if the downstream service answers with 4xx/5xx.
        requests.Timeout: if the downstream service does not answer in time.
    """
    post_url = "http://localhost:9000/test/test_post"
    post_data = {
        "message": body.message,
        "username": body.username
    }
    # Fix 1: `requests` has NO default timeout — without one a hung
    # downstream service would block this request forever.
    # Fix 2: `requests` is blocking; calling it directly inside an
    # `async def` handler would stall the whole event loop, so run it in a
    # worker thread instead.
    response = await asyncio.to_thread(requests.post, post_url, json=post_data, timeout=10)
    # Surface HTTP errors explicitly instead of failing later with a
    # confusing JSON decode error on an error page body.
    response.raise_for_status()
    res_json = response.json()
    code = res_json.get("code")
    message = res_json.get("message")
    data = res_json.get("data")
    print(f"code: {code}, message: {message}, data: {data}")
    return res_json


if __name__ == '__main__':
    # Development entry point: serve this module's `app` on port 9999
    # without auto-reload.
    uvicorn.run(app='main:app', port=9999, reload=False)
