import sys, os
from fastapi import FastAPI, APIRouter
from pydantic import BaseModel

from llm_config.llm_config import Master

chat_app = APIRouter()


class Input_Data(BaseModel):
    """Request body schema for the ``/llm_chat_test`` endpoint."""

    # The user's chat message to forward to the LLM.
    # NOTE(review): class name is not PascalCase (PEP 8 would suggest
    # InputData); kept as-is because renaming would break callers.
    input_info: str


@chat_app.post('/llm_chat_test')
async def llm_chat_test(input: Input_Data):
    """
    LLM chat endpoint.

    Args:
        input (Input_Data): request body whose ``input_info`` field carries
            the user's message. (The parameter name shadows the builtin
            ``input``; kept for interface compatibility.)

    Returns:
        dict: ``{"message": <LLM reply text>}`` on success, where the reply
        is the ``'text'`` field of the chain response (``None`` if absent);
        for the literal input "exit" (case-insensitive), a fixed farewell
        message is returned instead.
    """
    # Handle the exit sentinel first, before building the (potentially
    # expensive) model chain — the original constructed it unconditionally.
    if input.input_info.lower() == "exit":
        return {"message": "期待下次与你相遇！"}

    master = Master()
    llm_chain = master.llm_chain()
    llm_chat_resp = llm_chain.invoke({"input_info": input.input_info})
    # .get avoids a KeyError if the chain response lacks a 'text' key.
    return {"message": llm_chat_resp.get('text')}
