# test.py
import os
from dotenv import load_dotenv
from github import Github, Auth
# LangChain imports
from langchain_groq import ChatGroq
from langchain_core.tools import tool
from langchain.agents import create_tool_calling_agent, AgentExecutor
# Load environment variables
load_dotenv()
GROQ_API_KEY = os.getenv("GROQ_API_KEY")
GITHUB_PAT = os.getenv("GITHUB_API_KEY")
if not (GROQ_API_KEY and GITHUB_PAT):
    raise ValueError("Please set GROQ_API_KEY and GITHUB_API_KEY in your .env")
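# For reference, a .env file in the project root might look like this
# (placeholder values, not real credentials):
#
#   GROQ_API_KEY=gsk_your_groq_key_here
#   GITHUB_API_KEY=ghp_your_github_personal_access_token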
# Initialize GitHub client
_auth = Auth.Token(GITHUB_PAT)
_gh = Github(auth=_auth)
# Define the GitHub tool
@tool
def get_repo_info(repo_name: str) -> str:
    """Fetch and summarize metadata about a GitHub repository."""
    try:
        repo = _gh.get_repo(repo_name)
    except Exception as e:
        return f"Error fetching '{repo_name}': {e}"

    # Core repository metadata
    name = repo.full_name
    desc = repo.description or "No description"
    url = repo.html_url
    owner = repo.owner.login
    stars = repo.stargazers_count
    forks = repo.forks_count
    issues = repo.open_issues_count
    created = repo.created_at.isoformat()
    updated = repo.updated_at.isoformat()
    watchers = repo.watchers_count
    default_br = repo.default_branch
    language = repo.language or "None"

    # License and topics (not every repo has a license)
    try:
        license_name = repo.get_license().license.name
    except Exception:
        license_name = "None"
    topics = repo.get_topics()

    # Short README excerpt, if one exists
    try:
        raw_md = repo.get_readme().decoded_content.decode("utf-8")
        snippet = raw_md[:300].replace("\n", " ") + "..."
    except Exception:
        snippet = "No README found"

    # Top contributors and the most recent commit subjects
    contribs = repo.get_contributors()[:5]
    contrib_list = ", ".join(f"{c.login}({c.contributions})" for c in contribs)
    commits = repo.get_commits()[:3]
    commit_list = "; ".join(c.commit.message.split("\n")[0] for c in commits)

    return f"""
Repository: {name}
Description: {desc}
URL: {url}
Owner: {owner}
⭐ Stars: {stars} 🍴 Forks: {forks} πŸ› Open Issues: {issues}
πŸ‘οΈ Watchers: {watchers} Default branch: {default_br}
βš™οΈ Language: {language} License: {license_name}
πŸ” Topics: {topics}
Created: {created} Last updated: {updated}
README Snippet: {snippet}
πŸ‘₯ Top Contributors: {contrib_list}
🧾 Latest Commits: {commit_list}
"""
# Instantiate the Groq LLM
llm = ChatGroq(
    model="llama-3.1-8b-instant",
    temperature=0.3,
    max_tokens=1024,
    api_key=GROQ_API_KEY,
)
# Define the tools to pass into the agent
tools = [get_repo_info]
# Define the tool-calling agent prompt
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder

prompt = ChatPromptTemplate.from_messages([
    ("system",
     "You are GitHub Agent, an expert at analyzing repositories.\n"
     "When a user asks about a repo, call the tool and return a clear, concise summary of the repository based on the tool result.\n"
     "Avoid repeating raw tool output or adding unnecessary disclaimers.\n"
     "Respond in complete sentences, in natural language."
     ),
    MessagesPlaceholder(variable_name="chat_history", optional=True),
    ("human", "{input}"),
    # create_tool_calling_agent requires an `agent_scratchpad` placeholder
    MessagesPlaceholder(variable_name="agent_scratchpad"),
])
# Create the agent using LangChain's legacy AgentExecutor approach
agent = create_tool_calling_agent(llm, tools, prompt)
# Wrap the agent in an executor that runs the tool-calling loop
agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True, max_iterations=2)
# Export it
__all__ = ["agent_executor"]
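# Example of how another module might import the executor (assuming this file
# is saved as agent.py so it is importable as `agent`):
#
#   from agent import agent_executor
#   answer = agent_executor.invoke({"input": "Summarize zamalali/deepgit"})["output"]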
# Quick test
if __name__ == "__main__":
    import time

    user_input = "Give me details about the repo zamalali/deepgit"
    start_time = time.time()
    result = agent_executor.invoke({"input": user_input})
    end_time = time.time()
    print("\nFinal Answer:\n", result["output"])
    print(f"\nTook {end_time - start_time:.2f} seconds")