import dotenv
from langchain_community.chat_models import ChatOpenAI
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnablePassthrough

# Load environment variables; the OpenAI client below expects
# OPENAI_API_KEY to be set in the environment or a .env file.
dotenv.load_dotenv()

# Single-slot prompt: the caller's input is inserted verbatim as {query}.
prompt = ChatPromptTemplate.from_template("{query}")
llm = ChatOpenAI(model="gpt-3.5-turbo-16k")

# LCEL pipeline: RunnablePassthrough forwards the raw invoke() argument
# into the "query" slot, the prompt formats it into chat messages, the
# model answers, and StrOutputParser extracts the plain-text content.
chain = {"query": RunnablePassthrough()} | prompt | llm | StrOutputParser()

if __name__ == "__main__":
    # invoke() takes the bare query string; the dict wrapping is done by
    # the passthrough mapping at the head of the chain.
    print(chain.invoke("你好，请讲一个关于老师的笑话"))