# pip install qianfan -i https://pypi.tuna.tsinghua.edu.cn/simple
def chat_with_qianfan():
    """Demonstrate basic usage of Baidu Qianfan chat models via LangChain.

    Shows: synchronous invoke, streaming, selecting a specific model
    (ERNIE-Bot), and passing per-call sampling parameters.

    Requires QIANFAN_AK / QIANFAN_SK credentials; calls a remote service,
    so it performs network I/O and returns nothing.
    """
    import os
    from langchain_community.chat_models import QianfanChatEndpoint
    # Fix: HumanMessage lives in langchain_core.messages, not
    # langchain_core.language_models.chat_models (the old path raises ImportError).
    from langchain_core.messages import HumanMessage

    # Credentials must be filled in (or already present in the environment).
    os.environ["QIANFAN_AK"] = ""
    os.environ["QIANFAN_SK"] = ""

    chat = QianfanChatEndpoint(streaming=True)
    messages = [HumanMessage(content="Hello")]

    # Basic usage
    result = chat.invoke(messages)  # synchronous call
    print(result)  # print the reply
    # await chat.ainvoke(messages)  # async call
    # chat.batch([messages])  # batch call

    # Streaming call — surface the error instead of silently swallowing it.
    try:
        for chunk in chat.stream(messages):
            print(chunk.content, end="", flush=True)
        print()  # terminate the streamed line
    except TypeError as err:
        print(f"\nstreaming failed: {err}")

    # Using a different model on Qianfan
    chat_bot = QianfanChatEndpoint(
        streaming=True,
        model="ERNIE-Bot",
    )
    messages = [HumanMessage(content="Hello")]
    result = chat_bot.invoke(messages)
    print(result)

    # Model parameters: only ERNIE-Bot and ERNIE-Bot-turbo support
    # temperature, top_p and penalty_score.
    result = chat.invoke(
        [HumanMessage(content="Hello")],
        top_p=0.4,
        temperature=0.1,
        penalty_score=1,
    )
    print(result)