import os

import requests
from llama_index.llms.huggingface_api import HuggingFaceInferenceAPI
from llama_index.core.agent.workflow import AgentWorkflow
from llama_index.core.tools import FunctionTool


# Define arithmetic tool functions
def multiply(a: int, b: int) -> int:
    """Multiplies two integers and returns the resulting integer"""
    return a * b


def divide(a: int, b: int) -> float:
    """Divides two integers and returns the resulting float"""
    return a / b


def subtract(a: int, b: int) -> int:
    """Subtracts two integers and returns the resulting integer"""
    return a - b


def add(a: int, b: int) -> int:
    """Adds two integers and returns the resulting integer"""
    return a + b


def exponential(base: int, exponent: int) -> int:
    """Raises base to the exponent power and returns the resulting integer"""
    return base ** exponent


# Wrap the functions as tools
add_tool = FunctionTool.from_defaults(add)
subtract_tool = FunctionTool.from_defaults(subtract)
multiply_tool = FunctionTool.from_defaults(multiply)
divide_tool = FunctionTool.from_defaults(divide)
exponential_tool = FunctionTool.from_defaults(exponential)

# Collect all arithmetic tools into a list
arithmetic_tools = [add_tool, subtract_tool, multiply_tool, divide_tool, exponential_tool]

# Define LLM
llm = HuggingFaceInferenceAPI(model_name="Qwen/Qwen2.5-Coder-32B-Instruct")


# Define web search tool
def tavily_search(query: str) -> str:
    """Searches the web via the Tavily API and returns the top result titles and URLs"""
    response = requests.post(
        "https://api.tavily.com/search",
        headers={"Content-Type": "application/json"},
        json={
            "api_key": os.getenv("TAVILY_API_KEY"),
            "query": query,
            "search_depth": "basic",
            "max_results": 3,
        },
    )
    data = response.json()
    results = data.get("results", [])
    return "\n".join(f"{r['title']} - {r['url']}" for r in results)


web_search_tool = FunctionTool.from_defaults(
    fn=tavily_search,
    name="WebSearch",
    description="Search the web for information",
)

all_tools = arithmetic_tools + [web_search_tool]

# Define agent
agent = AgentWorkflow.from_tools_or_functions(
    all_tools,
    llm=llm,
)
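
# Example usage (a minimal sketch, not part of the original script): AgentWorkflow.run
# is asynchronous, so it is awaited inside an async entry point. The query string and
# the use of the user_msg keyword below are illustrative assumptions.
import asyncio


async def main():
    # Ask a question that exercises both the arithmetic tools and the web search tool
    response = await agent.run(user_msg="What is (2 + 3) * 5? Then search the web for LlamaIndex.")
    print(response)


if __name__ == "__main__":
    asyncio.run(main())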