from fastapi import FastAPI
from langchain_openai import ChatOpenAI
import os
from langchain_core.output_parsers import StrOutputParser
from langchain.prompts import ChatPromptTemplate
from langchain.prompts.chat import SystemMessagePromptTemplate, HumanMessagePromptTemplate
from pydantic import BaseModel
from starlette.responses import StreamingResponse

# --- Environment & application wiring ---
# BaoCloud DeepSeek connection settings come from the environment.
base_url = os.environ.get("BAOCLOUD_DS_BASE_URL")
model_name = os.environ.get("BAOCLOUD_DS_MODEL_NAME")

# Fail fast with a clear message: assigning None into os.environ (the
# original `os.environ[...] = os.getenv(...)`) raises a cryptic TypeError
# when BAOCLOUD_DS_API_KEY is not set.
_api_key = os.getenv("BAOCLOUD_DS_API_KEY")
if _api_key is None:
    raise RuntimeError("Environment variable BAOCLOUD_DS_API_KEY is not set")
os.environ["OPENAI_API_KEY"] = _api_key

# 1. Create the chat model (OpenAI-compatible endpoint).
model = ChatOpenAI(base_url=base_url, model_name=model_name)

# 2. Build the prompt: a fixed persona system message plus the user's text.
system_message_prompt = SystemMessagePromptTemplate.from_template(
    "你是一个友好的智能AI助手，名字是小团团。你的性格活泼、开朗、乐观，是大家的开心果。请用符合你性格的语调回答问题。")
human_message_prompt = HumanMessagePromptTemplate.from_template("{text}")
prompt = ChatPromptTemplate.from_messages([system_message_prompt, human_message_prompt])

# 3. Output parser: extract the plain string content from the model reply.
parser = StrOutputParser()

# 4. Compose the runnable chain: prompt -> model -> string parser.
chain = prompt | model | parser

app = FastAPI(title="小团团Agent", version="v1.0", description="小团团Agent服务")


class MessageRequestBody(BaseModel):
    """Request body for POST /chat: carries the user's chat message."""
    # The raw user message that gets substituted into the {text} prompt slot.
    text: str


async def call_model(text: str):
    """Yield the chain's answer for *text* incrementally, chunk by chunk."""
    token_stream = chain.astream({"text": text})
    async for piece in token_stream:
        yield piece


@app.get("/hello")
async def root():
    """Simple liveness endpoint: always responds with a fixed greeting."""
    payload = {"message": "Hello World"}
    return payload


@app.post("/chat")
async def chat(body: MessageRequestBody):
    """Non-streaming chat endpoint.

    Args:
        body: JSON payload whose ``text`` field is the user's message.

    Returns:
        A dict with the model's complete answer under the ``answer`` key.
    """
    # Use the async variant: the original synchronous `chain.invoke` would
    # block FastAPI's event loop for the whole LLM round-trip, stalling
    # every other in-flight request on this worker.
    answer = await chain.ainvoke({"text": body.text})
    return {
        "answer": answer
    }


@app.get("/stream")
async def stream(text: str):
    """Stream the model's reply for *text* back to the client as it is generated."""
    chunk_source = call_model(text)
    return StreamingResponse(chunk_source, media_type="text/html")


if __name__ == "__main__":
    # Launch the ASGI server only when this module is executed as a script.
    import uvicorn

    uvicorn.run(app=app, host="0.0.0.0", port=9000)
