from pydantic import BaseModel, Field
import os
from langchain.chat_models import init_chat_model

# Read the API key from the environment and fail fast when it is absent.
# BUG FIX: the original did `api_key = str(key)`, which turns a missing
# variable (None) into the literal string "None" — that bogus credential
# would then be sent to the API and fail with a confusing auth error.
key = os.getenv("OPENAI_API_KEY")
if key is None:
    raise RuntimeError("OPENAI_API_KEY environment variable is not set")
api_key = key

# Configuration for the chat model client; `base_url` points the OpenAI-style
# client at a proxy-compatible endpoint instead of api.openai.com.
_model_config = dict(
    model="gpt-4o-mini",
    base_url="https://api.zetatechs.com/v1",
    api_key=api_key,
)
llm = init_chat_model(**_model_config)
from pydantic import BaseModel, Field


class ResponseFormatter(BaseModel):
    """Always use this tool to structure your response to the user."""
    # NOTE(review): the docstring and the Field descriptions presumably feed the
    # JSON schema the model sees (tool/structured-output description), so their
    # exact wording affects model behavior — verify before editing them.
    # The model's direct answer to the user's question.
    answer: str = Field(description="The answer to the user's question")
    # A suggested follow-up the user could ask next.
    followup_question: str = Field(description="A followup question the user could ask")


# Approach 1 (kept for reference, disabled): structured output via tool binding.
# model_with_tools = llm.bind_tools([ResponseFormatter])
# ai_msg = model_with_tools.invoke("What is the powerhouse of the cell?")
# print(ai_msg)

# Approach 2: structured output via model.with_structured_output(ResponseFormatter),
# which returns the parsed ResponseFormatter instance directly.
structured_llm = llm.with_structured_output(ResponseFormatter)
parsed_answer = structured_llm.invoke("What is the powerhouse of the cell?")
print(parsed_answer)