|
from llama_index.core.agent.workflow import AgentWorkflow |
|
from llama_index.core.workflow import Context |
|
from llama_index.core.tools import FunctionTool |
|
from llama_index.llms.huggingface_api import HuggingFaceInferenceAPI |
|
from llama_index.tools.duckduckgo import DuckDuckGoSearchToolSpec |
|
from llama_index.tools.wikipedia import WikipediaToolSpec |
|
from llama_index.core.tools.tool_spec.load_and_search import LoadAndSearchToolSpec |
|
from llama_index.readers.web import SimpleWebPageReader |
|
from llama_index.core.tools.ondemand_loader_tool import OnDemandLoaderTool |
|
from langfuse.llama_index import LlamaIndexInstrumentor |
|
from llama_index.llms.ollama import Ollama |
|
from llama_index.core.agent.workflow import ReActAgent |
|
|
|
class BasicAgent:
    """Question-answering agent backed by web (DuckDuckGo) and Wikipedia search.

    Wraps a LlamaIndex ``AgentWorkflow`` around a DuckDuckGo full-search tool
    and the Wikipedia tool-spec tools, with optional Langfuse tracing.
    """

    def __init__(self, ollama=False, langfuse=True):
        """Build the agent.

        Args:
            ollama: If True, use a local Ollama ``mistral`` model; otherwise
                use the HuggingFace Inference API (Qwen2.5-Coder-32B-Instruct).
            langfuse: If True, start a Langfuse instrumentor for tracing;
                traces are flushed after each ``__call__``.
        """
        # Select the backend LLM.
        if ollama:
            llm = Ollama(model="mistral:latest", request_timeout=120.0)
        else:
            llm = HuggingFaceInferenceAPI(model_name="Qwen/Qwen2.5-Coder-32B-Instruct")

        # Optional Langfuse tracing; the instrumentor is flushed per run
        # in __call__ so traces are not lost.
        self.langfuse = langfuse
        if self.langfuse:
            self.instrumentor = LlamaIndexInstrumentor()
            self.instrumentor.start()

        # DuckDuckGo full web search wrapped as a single FunctionTool.
        search_tool = FunctionTool.from_defaults(
            DuckDuckGoSearchToolSpec().duckduckgo_full_search
        )

        # Wikipedia tool spec; its full tool list is handed to the agent below.
        wiki_spec = WikipediaToolSpec()

        self.agent = AgentWorkflow.from_tools_or_functions(
            wiki_spec.to_tool_list() + [search_tool],
            llm=llm,
            verbose=True,
            system_prompt=(
                "You are a helpful assistant that can search the web and wikipedia for information.\n"
                "Only output the shortest possible answer to the question. "
                "Avoid additional information or explanations."
            ),
        )

    async def __call__(self, question: str) -> str:
        """Run the agent on *question* and return the stripped answer text.

        Args:
            question: The user question to answer.

        Returns:
            The agent's final answer with surrounding whitespace removed.
        """
        response = await self.agent.run(user_msg=question)

        if self.langfuse:
            # Push any buffered traces to Langfuse before returning.
            self.instrumentor.flush()

        return response.response.content.strip()