# import os

# from langchain_core.messages import HumanMessage, SystemMessage

# from langchain_openai import OpenAI


# client = OpenAI(
#     model="qwen-plus",
#     api_key=os.getenv("DASHSCOPE_API_KEY"),
#     base_url="https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions"
# )

# messages=(
#     SystemMessage("你是个聊天解闷的机器人."),
#     HumanMessage("给我讲个关于水果的笑话"),
#     # AssistantMessage("The meaning of life is to live."),
#     # UserMessage("What is the meaning of life?"),
# )

# completion = client.invoke(messages)
# print(completion.model_dump_json())


# Demo: fill-in-the-middle (FIM) code completion with Qwen2.5-Coder via
# DashScope's OpenAI-compatible endpoint.
# Requires the DASHSCOPE_API_KEY environment variable to be set.
import os

from openai import OpenAI

client = OpenAI(
    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
    api_key=os.getenv("DASHSCOPE_API_KEY"),
)

# Qwen2.5-Coder's FIM template is:
#   <|fim_prefix|>{prefix}<|fim_suffix|>{suffix}<|fim_middle|>
# The trailing <|fim_middle|> sentinel is required — it tells the model to
# generate the span between prefix and suffix. The original prompt omitted it.
completion = client.completions.create(
    model="qwen2.5-coder-32b-instruct",
    prompt=(
        "<|fim_prefix|>写一个python的快速排序函数，def quick_sort(arr):"
        "<|fim_suffix|><|fim_middle|>"
    ),
)

# The generated middle span (the function body) is returned as plain text.
print(completion.choices[0].text)