import dotenv
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnablePassthrough
from langchain_openai import ChatOpenAI
from pydantic import Field, BaseModel

# Load environment variables from a local .env file (e.g. OPENAI_API_KEY)
# before any ChatOpenAI instance is constructed below.
dotenv.load_dotenv()

class QAExtra(BaseModel):
    # Structured-output schema: one hypothetical question/answer pair.
    # NOTE(review): the docstring and the Field descriptions below are
    # runtime data — llm.with_structured_output(QAExtra) serializes them
    # into the tool/JSON schema sent to the model — so they are kept in
    # Chinese, byte-identical, on purpose.
    # Docstring translation: "A Q&A-pair tool, carrying a hypothetical
    # question plus its answer."
    """一个问答对工具，传递假设性问题+答案"""
    # Hypothetical question extracted from the user's query
    # (description translation: "hypothetical question").
    question:str = Field(description="假设性问题")
    # Hypothetical answer paired with the question
    # (description translation: "hypothetical answer").
    answer:str = Field(description="假设性答案")

# Chat model wrapped so its responses are parsed into QAExtra instances.
llm = ChatOpenAI(model="gpt-4o-mini")
struct_llm = llm.with_structured_output(QAExtra)

# Two-message prompt: a fixed system instruction plus the raw user query.
prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "请从用户传递的query中提取假设性问题+答案"),
        ("human", "{query}"),
    ]
)

# LCEL pipeline: route the bare input string into the "query" slot,
# format the prompt, then run the structured-output model.
chain = {"query": RunnablePassthrough()} | prompt | struct_llm

print(chain.invoke("今天星期三，晚上吃什么好"))