import asyncio

# FunctionAgent: an agent that can invoke Python functions as tools
from llama_index.core.agent.workflow import FunctionAgent
from llama_index.llms.ollama import Ollama

import os

os.environ["HF_ENDPOINT"] = "https://hf-mirror.com"

from llama_index.llms.openai_like import OpenAILike

llm = OpenAILike(
    model="doubao-seed-1-6-250615",
    api_base="https://ark.cn-beijing.volces.com/api/v3",
    api_key="d52e49a1-36ea-44bb-bc6e-65ce789a72f6",
    # Explicitly set the context window to match the model's context window
    context_window=128000,
    # Controls whether the model uses chat or completion endpoint
    is_chat_model=True,
    # Controls whether the model supports function calling
    is_function_calling_model=True,
)


def multiply(a: float, b: float) -> float:
    return a * b


# 创建一个带有计算功能的智能代理
agent = FunctionAgent(
    tools=[multiply],  # 指定代理可用的工具
    # request_timeout 定义请求的超时的时间为360秒
    # context_window 手动设置上下文窗口大小以限制内存的使用
    llm=llm,
    system_prompt="你是一个乐于助人的助手，可以计算两个数的乘积",  # 定义系统提示，描述代理的行为
)


# 定义异步函数
async def main():
    response = await agent.run("请帮我计算7乘以8等于多少?")
    print(str(response))


if __name__ == "__main__":
    asyncio.run(main())
