import os
import time
from typing import List

from dotenv import load_dotenv
from langchain.callbacks.tracers import ConsoleCallbackHandler
from langchain.tools import Tool, tool
from langchain_core.messages import HumanMessage, SystemMessage
from langchain_google_genai import ChatGoogleGenerativeAI
from langgraph.graph import START, StateGraph, MessagesState
from langgraph.graph.state import CompiledStateGraph
from langgraph.prebuilt import ToolNode, tools_condition

from tools import (
    web_search,
    visit_webpage,
    wikipedia_search,
    extract_text_from_image,
    analyze_file,
)

load_dotenv()


class GeminiAgent:
    """ReAct-style agent wiring a Gemini chat model to tools via a LangGraph state graph.

    The graph loops assistant -> tools -> assistant until the model stops
    emitting tool calls, then the final assistant message is the answer.
    """

    _api_key: str
    _model_name: str
    _tools: List[Tool]
    _llm: ChatGoogleGenerativeAI
    _graph: CompiledStateGraph

    def __init__(self):
        # NOTE(review): os.getenv returns None when GOOGLE_API_KEY is unset;
        # the failure then surfaces later inside ChatGoogleGenerativeAI.
        self._api_key = os.getenv('GOOGLE_API_KEY')
        self._model_name = "gemini-2.0-flash"
        self._tools = self._setup_tools()
        self._llm = self._setup_llm()
        self._graph = self._setup_graph()

    def run(self, query: str) -> str:
        """Answer *query* with the agent graph, retrying on transient errors.

        Reads the system prompt from ``system_prompt.txt``, invokes the graph
        up to ``max_retries`` times with linear backoff, and returns the text
        of the final assistant message.

        Args:
            query: The user question to answer.

        Returns:
            The final answer text, or an error string after all retries fail.
        """
        max_retries: int = 3
        with open('system_prompt.txt') as file:
            system_prompt = SystemMessage(content=file.read())

        for attempt in range(max_retries):
            try:
                response = self._graph.invoke(
                    {
                        "messages": [
                            system_prompt,
                            HumanMessage(content=query),
                        ]
                    },
                    config={'callbacks': [ConsoleCallbackHandler()]},
                )
                # FIX: return the final message's text rather than the raw
                # graph-state dict, matching the declared `-> str` return type.
                return response["messages"][-1].content
            except Exception as e:
                if attempt < max_retries - 1:
                    # Linear backoff: 3s, 6s, ... between attempts.
                    sleep_time = (attempt + 1) * 3
                    # FIX: the original f-string contained a literal newline
                    # (a SyntaxError); also log the error before sleeping.
                    print(f"Attempt {attempt + 1} failed. Retrying in {sleep_time} seconds...")
                    print(f"Error: {str(e)}")
                    time.sleep(sleep_time)
                else:
                    return f"Error processing query after {max_retries} attempts: {str(e)}"
        # Unreachable: every loop iteration either returns or retries.
        # (The original's trailing print here was dead code.)
        return f"Error processing query after {max_retries} attempts"

    def _setup_llm(self) -> ChatGoogleGenerativeAI:
        """Build the Gemini chat model (temperature 0 for deterministic answers)."""
        return ChatGoogleGenerativeAI(
            model=self._model_name,
            google_api_key=self._api_key,
            temperature=0,
        )

    def _setup_tools(self) -> List[Tool]:
        """Return the tools the assistant may call (defined in ``tools.py``)."""
        return [
            web_search,
            visit_webpage,
            wikipedia_search,
            extract_text_from_image,
            analyze_file,
        ]

    def _setup_graph(self) -> CompiledStateGraph:
        """Compile the assistant/tools loop as a LangGraph state graph."""
        llm_with_tools = self._llm.bind_tools(self._tools)

        def assistant(state: MessagesState):
            # One LLM step over the accumulated conversation.
            return {
                "messages": [
                    llm_with_tools.invoke(state["messages"])
                ]
            }

        builder = StateGraph(MessagesState)
        builder.add_node("assistant", assistant)
        builder.add_node("tools", ToolNode(self._tools))
        builder.add_edge(START, "assistant")
        # Route to "tools" when the last message has tool calls, else END.
        builder.add_conditional_edges(
            "assistant",
            tools_condition,
        )
        builder.add_edge("tools", "assistant")
        return builder.compile()