import os

from dotenv import load_dotenv
from langchain_core.messages import HumanMessage, SystemMessage
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_groq import ChatGroq
from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint
from langchain_openai import ChatOpenAI
from langgraph.graph import START, MessagesState, StateGraph
from langgraph.prebuilt import ToolNode, tools_condition

from tools import *
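# `tools` is the project's local module; it is assumed to provide the tool
# functions bound below (multiply, add, ..., download_file), presumably
# implemented as LangChain tools (e.g. with the @tool decorator).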


load_dotenv()
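# The provider clients read their API keys from the environment. Typical
# variable names are GOOGLE_API_KEY (Gemini), GROQ_API_KEY (Groq),
# HF_TOKEN / HUGGINGFACEHUB_API_TOKEN (Hugging Face) and OPENROUTER_API_KEY;
# the exact names depend on each client's defaults, so match them in .env.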


def buildAgent(provider="google"):
    """Build and compile the LangGraph agent for the given provider."""
    with open("system_prompt.txt", "r", encoding="utf-8") as f:
        system_prompt = f.read()
    print(system_prompt)

    sys_msg = SystemMessage(content=system_prompt)
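    # The system message is prepended to the conversation on every model call
    # in the assistant node below.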

    if provider == "huggingface":
        llm = ChatHuggingFace(
            llm=HuggingFaceEndpoint(repo_id="Qwen/Qwen2.5-Coder-32B-Instruct"),
        )
    elif provider == "groq":
        llm = ChatGroq(model="qwen-qwq-32b")
    elif provider == "google":
        llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash-exp")
    elif provider == "openrouter":
        llm = ChatOpenAI(
            base_url="https://openrouter.ai/api/v1",
            model="google/gemini-2.0-flash-001",
            api_key=os.getenv("OPENROUTER_API_KEY"),
        )
    else:
        raise ValueError(
            "Invalid provider. Choose 'google', 'groq', 'huggingface' or 'openrouter'."
        )

    agent_tools = [
        multiply,
        add,
        subtract,
        divide,
        modulus,
        power,
        square_root,
        web_search,
        wiki_search,
        arxiv_search,
        download_file,
    ]

    chat_with_tools = llm.bind_tools(agent_tools)
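    # bind_tools attaches the tools' schemas to every request so the model can
    # answer with structured tool calls instead of plain text.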

    def assistant(state: MessagesState):
        """Assistant node: call the tool-enabled model with the system prompt prepended."""
        return {
            "messages": [chat_with_tools.invoke([sys_msg] + state["messages"])],
        }

    def retriever(state: MessagesState):
        """Retriever node: currently a pass-through that leaves the messages unchanged."""
        return {"messages": state["messages"]}
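    # A fuller retriever could, for example, look up similar solved questions
    # in a vector store and add them as extra context before the assistant runs.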

    builder = StateGraph(MessagesState)

    builder.add_node("retriever", retriever)
    builder.add_node("assistant", assistant)
    builder.add_node("tools", ToolNode(agent_tools))
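    # ToolNode executes any tool calls found in the latest AI message and
    # returns the results as ToolMessages.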

    builder.add_edge(START, "retriever")
    builder.add_edge("retriever", "assistant")
    builder.add_conditional_edges(
        "assistant",
        tools_condition,
    )
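    # tools_condition routes to "tools" when the assistant's last message
    # contains tool calls and to END otherwise, so the graph loops
    # assistant -> tools -> assistant until the model answers directly.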
    builder.add_edge("tools", "assistant")
    return builder.compile()


if __name__ == "__main__":
    question = "How many studio albums were published by Mercedes Sosa between 2000 and 2009 (included)? You can use the latest 2022 version of english wikipedia."
    graph = buildAgent(provider="google")
    messages = [HumanMessage(content=question)]
    print(messages)
    messages = graph.invoke({"messages": messages})
    for m in messages["messages"]:
        m.pretty_print()
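    # To try a different backend, build the graph with another provider,
    # e.g. graph = buildAgent(provider="groq") or provider="openrouter".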