"""Build `json_parse_chain`: render the conversation so far into a single parsed JSON answer.

Pipeline: chat prompt (system instruction + prior conversation) -> ChatOpenAI -> `parser`.
"""

from langchain.prompts import (
    ChatPromptTemplate,
    MessagesPlaceholder,
    PromptTemplate,
    SystemMessagePromptTemplate,
)
from langchain_openai import ChatOpenAI

from agent.datastructures import ResponseSchema, parser

# temperature=0 for deterministic, parse-friendly output.
model = ChatOpenAI(model="gpt-3.5-turbo-0125", temperature=0)
# BUG FIXED: the original called `model.with_structured_output(ResponseSchema)`
# and discarded the result. `with_structured_output` returns a NEW runnable, so
# that call was a no-op. We intentionally keep the plain model here because the
# chain below already does output parsing via `parser`; binding structured
# output AND piping into `parser` would double-parse.

# DEAD CODE REMOVED: the original built a `PromptTemplate` (containing
# `parser.get_format_instructions()` and literal "/n" typos for "\n") and then
# immediately overwrote `prompt` with the ChatPromptTemplate below, so it never
# took effect.
# NOTE(review): as a consequence, `parser.get_format_instructions()` is not part
# of the effective prompt — the system message below is the only formatting
# instruction the model sees. If the parser's format instructions are needed,
# they should be added to this system message — confirm intent with the author.
prompt = ChatPromptTemplate.from_messages(
    [
        (
            "system",
            "You will be given the chat so far, you should render the final answer as a JSON object",
        ),
        MessagesPlaceholder(variable_name="conversation"),
    ]
)

# Runnable chain: expects {"conversation": [...messages...]}, returns the
# parser's structured result.
json_parse_chain = prompt | model | parser