import os
import openai
from openai import OpenAI
import gradio as gr
import requests
import pandas as pd
from smolagents import CodeAgent, DuckDuckGoSearchTool, tool
# --- Constants ---
DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
# --- Configure OpenAI SDK & Client ---
openai_api_key = os.getenv("OPENAI_API_KEY")
if not openai_api_key:
    raise RuntimeError("Please set OPENAI_API_KEY in your Space secrets or env!")
openai.api_key = openai_api_key
client = OpenAI() # official OpenAI client
OPENAI_MODEL_ID = os.getenv("OPENAI_MODEL_ID", "gpt-4.1")
# --- Model Wrapper with __call__ ---
class OpenAIModelWrapper:
"""
Wraps the new OpenAI client.responses.create API so that
CodeAgent can call it directly.
"""
def __init__(self, model_id: str, client: OpenAI):
self.model_id = model_id
self.client = client
def __call__(self, prompt: str) -> str:
resp = self.client.responses.create(
model=self.model_id,
input=prompt
)
return resp.output_text
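# Minimal usage sketch of the wrapper above (assumes OPENAI_API_KEY is set and the
# chosen model supports the Responses API):
#   wrapper = OpenAIModelWrapper("gpt-4.1", OpenAI())
#   wrapper("Say hello")  # returns the model's reply text via resp.output_text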
# --- Tool Definitions ---
@tool
def summarize_query(query: str) -> str:
"""
Reframes an unclear query into a better one.
Args:
query (str): The search query to refine.
Returns:
str: A concise, improved query.
"""
return f"Summarize and reframe: {query}"
@tool
def wikipedia_search(page: str) -> str:
"""
Fetches the summary extract of an English Wikipedia page.
Args:
page (str): The page title (e.g. 'Mercedes_Sosa_discography').
Returns:
str: The extract section text.
"""
url = f"https://en.wikipedia.org/api/rest_v1/page/summary/{page}"
resp = requests.get(url, timeout=10)
resp.raise_for_status()
return resp.json().get("extract", "")
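# Example: wikipedia_search("Mercedes_Sosa") calls
# https://en.wikipedia.org/api/rest_v1/page/summary/Mercedes_Sosa and returns the
# "extract" field of the JSON response (the page's lead summary), or "" if absent.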
search_tool = DuckDuckGoSearchTool()
wiki_tool = wikipedia_search
summarize_tool = summarize_query
# --- ReACT + Scratchpad + Auto-Retry Prompt ---
instruction_prompt = """
You are a ReACT agent with three tools:
• DuckDuckGoSearchTool(query: str)
• wikipedia_search(page: str)
• summarize_query(query: str)
Internally, for each question:
1. Thought: decide which tool to call.
2. Action: call the chosen tool.
3. Observation: record the result.
4. If empty/irrelevant:
Thought: retry with summarize_query + DuckDuckGoSearchTool.
Record new Observation.
5. Thought: integrate observations.
Finally, output exactly one line:
FINAL ANSWER: [your concise answer]
Rules:
- Numbers: digits only.
- Lists: comma-separated, no extra punctuation.
- Strings: no filler words.
"""
# --- Build the CodeAgent with the callable wrapper ---
llm_wrapper = OpenAIModelWrapper(model_id=OPENAI_MODEL_ID, client=client)
smart_agent = CodeAgent(
    tools=[search_tool, wiki_tool, summarize_tool],
    model=llm_wrapper
)
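# Note: smolagents' CodeAgent runs the Thought/Action/Observation loop itself by
# generating Python snippets that call the registered tools, invoking llm_wrapper
# for each generation step. Passing a plain callable as `model` is a simplification
# that assumes the installed smolagents version accepts it.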
# --- Wrap in BasicAgent for Gradio ---
class BasicAgent:
    def __init__(self):
        print("SmolAgent (GPT-4.1) with ReACT & tools initialized.")

    def __call__(self, question: str) -> str:
        prompt = instruction_prompt.strip() + "\n\nQUESTION: " + question.strip()
        print(f"Agent prompt: {prompt[:120]}…")
        try:
            return smart_agent.run(prompt)
        except Exception as e:
            return f"AGENT ERROR: {e}"
# --- Gradio & Submission Logic ---
def run_and_submit_all(profile: gr.OAuthProfile | None):
    if not profile:
        return "Please log in to Hugging Face.", None
    username = profile.username
    space_id = os.getenv("SPACE_ID", "")
    agent = BasicAgent()
    agent_code = f"https://huggingface.co/spaces/{space_id}/tree/main"

    # 1. Fetch questions
    try:
        resp = requests.get(f"{DEFAULT_API_URL}/questions", timeout=15)
        resp.raise_for_status()
        questions = resp.json() or []
    except Exception as e:
        return f"Error fetching questions: {e}", None

    # 2. Run agent on each question
    logs, payload = [], []
    for item in questions:
        tid = item.get("task_id")
        q = item.get("question")
        if not tid or not q:
            continue
        ans = agent(q)
        logs.append({"Task ID": tid, "Question": q, "Submitted Answer": ans})
        payload.append({"task_id": tid, "submitted_answer": ans})

    if not payload:
        return "Agent did not produce any answers.", pd.DataFrame(logs)

    # 3. Submit answers
    submission = {"username": username, "agent_code": agent_code, "answers": payload}
    try:
        post = requests.post(f"{DEFAULT_API_URL}/submit", json=submission, timeout=60)
        post.raise_for_status()
        res = post.json()
        status = (
            f"Submission Successful!\n"
            f"User: {res.get('username')}\n"
            f"Overall Score: {res.get('score', 'N/A')}% "
            f"({res.get('correct_count', '?')}/{res.get('total_attempted', '?')})\n"
            f"Message: {res.get('message', '')}"
        )
        return status, pd.DataFrame(logs)
    except Exception as e:
        return f"Submission Failed: {e}", pd.DataFrame(logs)
# --- Gradio Interface ---
with gr.Blocks() as demo:
gr.Markdown("# SmolAgent GAIA Runner 🚀")
gr.Markdown("""
**Instructions:**
1. Clone this space.
2. Add `OPENAI_API_KEY` (and optionally `OPENAI_MODEL_ID`) in Settings → Secrets.
3. Log in to Hugging Face.
4. Click **Run Evaluation & Submit All Answers**.
""")
gr.LoginButton()
run_btn = gr.Button("Run Evaluation & Submit All Answers")
status_out = gr.Textbox(label="Status", lines=5, interactive=False)
table_out = gr.DataFrame(label="Questions & Answers", wrap=True)
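    # No explicit inputs are wired up: because run_and_submit_all's first parameter
    # is typed gr.OAuthProfile | None, Gradio injects the logged-in user's profile
    # automatically (None when nobody is logged in via the LoginButton above).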
    run_btn.click(fn=run_and_submit_all, outputs=[status_out, table_out])
if __name__ == "__main__":
    demo.launch(debug=True, share=False)