from fastapi import FastAPI
from langchain_openai import ChatOpenAI
import os
from langchain_core.output_parsers import StrOutputParser
from langchain.prompts import ChatPromptTemplate
from langchain.prompts.chat import SystemMessagePromptTemplate, HumanMessagePromptTemplate
from pydantic import BaseModel
from starlette.responses import StreamingResponse
from langchain_core.runnables.history import RunnableWithMessageHistory
from langchain_mongodb import MongoDBChatMessageHistory

# Model endpoint configuration — all values come from environment variables.
base_url = os.environ.get("BAOCLOUD_DS_BASE_URL")
model_name = os.environ.get("BAOCLOUD_DS_MODEL_NAME")

# Fail fast with a clear message: the original `os.environ[...] = os.getenv(...)`
# raises a cryptic `TypeError: str expected, not NoneType` when the key is unset.
_api_key = os.getenv("BAOCLOUD_DS_API_KEY")
if _api_key is None:
    raise RuntimeError(
        "Environment variable BAOCLOUD_DS_API_KEY is not set; "
        "it is required to authenticate with the model service."
    )
os.environ["OPENAI_API_KEY"] = _api_key

# 1. Create the chat model (an OpenAI-compatible endpoint).
model = ChatOpenAI(base_url=base_url, model_name=model_name)

# 2、准备提示词，添加对历史记录的处理
# 2. Prompt: fixed persona (system) + persisted history + current user message.
system_message_prompt = SystemMessagePromptTemplate.from_template(
    "你是一个友好的智能AI助手，名字是小团团。你的性格活泼、开朗、乐观，是大家的开心果。请用符合你性格的语调回答问题，回答问题的字数控制在100字以内。")
human_message_prompt = HumanMessagePromptTemplate.from_template("{text}")
_message_templates = [
    system_message_prompt,
    ("placeholder", "{history}"),  # filled in by RunnableWithMessageHistory
    human_message_prompt,
]
prompt = ChatPromptTemplate.from_messages(_message_templates)

# 3. Parser that reduces the model output to a plain string.
parser = StrOutputParser()

# 4. Compose prompt -> model -> parser into one runnable chain.
chain = prompt | model | parser

# MongoDB configuration for persisting chat histories.
# Defaults target a local/dev instance; override via environment variables.
MONGODB_CONNECTION_STRING = os.environ.get("MONGODB_CONNECTION_STRING", "mongodb://centos.server:27017")
DATABASE_NAME = os.environ.get("MONGODB_DATABASE_NAME", "chat_history_db")
COLLECTION_NAME = os.environ.get("MONGODB_COLLECTION_NAME", "chat_histories")


# Session-history factory used by RunnableWithMessageHistory below.
def get_history(session_id: str) -> MongoDBChatMessageHistory:
    """Return the MongoDB-backed message history for one chat session."""
    store_settings = {
        "connection_string": MONGODB_CONNECTION_STRING,
        "database_name": DATABASE_NAME,
        "collection_name": COLLECTION_NAME,
    }
    return MongoDBChatMessageHistory(session_id=session_id, **store_settings)


# Wrap the chain so each invocation loads and saves messages for its session:
# "text" is the key carrying the incoming user message, and "history" is the
# prompt slot that receives the prior messages (the "{history}" placeholder).
chain_with_history = RunnableWithMessageHistory(
    chain,
    get_history,
    input_messages_key="text",
    history_messages_key="history"
)

# FastAPI application exposing the agent over HTTP.
app = FastAPI(title="小团团Agent", version="v1.0", description="小团团Agent服务")


class MessageRequestBody(BaseModel):
    # Payload for POST /chat: the user's message plus the session key used
    # to look up this conversation's persisted history.
    text: str
    session_id: str = "default_session"


async def call_model(text: str, session_id: str):
    """Yield the assistant's reply for *text* piece by piece, as the model streams it."""
    run_config = {"configurable": {"session_id": session_id}}
    async for piece in chain_with_history.astream({"text": text}, config=run_config):
        yield piece


@app.get("/hello")
async def root():
    """Trivial liveness endpoint."""
    return {"message": "Hello World"}


@app.post("/chat")
async def chat(body: MessageRequestBody):
    """Non-streaming chat: run the full chain and return the complete answer."""
    run_config = {"configurable": {"session_id": body.session_id}}
    answer = await chain_with_history.ainvoke({"text": body.text}, config=run_config)
    return {"answer": answer}


@app.get("/stream")
async def stream(text: str, session_id: str = "default_session"):
    """Streaming chat: relay model output chunks to the client as they arrive."""
    # NOTE(review): the payload is plain model text, not HTML — "text/plain;
    # charset=utf-8" (or SSE) looks more appropriate; kept as-is to preserve
    # the existing HTTP contract. TODO confirm with consumers.
    return StreamingResponse(call_model(text, session_id), media_type="text/html")


if __name__ == "__main__":
    # Deferred import: uvicorn is only needed when running this module directly.
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=9000)
