from dotenv import load_dotenv
from langchain_openai import ChatOpenAI
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
import os

# Load environment variables (e.g. the API key) from a local .env file.
load_dotenv()

# Prompt template: substitutes {topic} into the request sent to the model.
prompt = ChatPromptTemplate.from_template("tell me a short joke about {topic}")

# Chat model backed by Alibaba DashScope's OpenAI-compatible endpoint.
# NOTE(review): the env var name is lowercase "api_key" — unconventional;
# confirm the .env file actually defines that exact key.
model = ChatOpenAI(
    api_key=os.getenv("api_key"),
    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
    model="qwen-plus",
)

# Output parser that extracts the plain string content from the chat response.
output_parser = StrOutputParser()

# LCEL composition: each component is a Runnable, chained with the | operator.
# Definitions stay at module level so other modules can import `chain` directly.
chain = prompt | model | output_parser

# Guard the network call so importing this module does not trigger an API request.
if __name__ == "__main__":
    print(chain.invoke({"topic": "ice cream"}))
