Spaces:
Sleeping
Sleeping
File size: 2,332 Bytes
cb3155b 9b5b26a c19d193 cb3155b 6aae614 9b5b26a a4b07ae 14b4e61 e45255d a4b07ae 49bfc49 161f95c 4368dd9 161f95c 4368dd9 161f95c 4368dd9 a4b07ae 4368dd9 161f95c 4368dd9 403af8b 4368dd9 cfa3bdd cb3155b 4368dd9 cb3155b 4368dd9 6aae614 e121372 cb3155b 13d500a 8c01ffb cb3155b 9b5b26a 8c01ffb 861422e 9b5b26a 8c01ffb 8fe992b 403af8b 8c01ffb 861422e 8fe992b cb3155b |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 |
from smolagents import CodeAgent, DuckDuckGoSearchTool, HfApiModel, load_tool, tool
import datetime
import requests
import pytz
import yaml
import re
from typing import Optional
from duckduckgo_search import DDGS
from tools.final_answer import FinalAnswerTool
from Gradio_UI import GradioUI
@tool
def dog_average_longevity(breed: str) -> str:
    """A tool that retrieves the average lifespan of a given dog breed using DuckDuckGo search.
    Args:
        breed: a string representing the name of the dog breed.
    """
    query = f"average lifespan for {breed} dog in years"
    with DDGS() as ddgs:
        results = list(ddgs.text(query, max_results=5))
    if results:
        for result in results:
            # BUGFIX: DDGS.text() yields dicts ({'title', 'href', 'body'});
            # the original passed the dict itself into extract_lifespan, whose
            # re.search would raise TypeError. Search the readable fields instead.
            snippet = f"{result.get('title', '')} {result.get('body', '')}"
            lifespan = extract_lifespan(snippet)
            if lifespan:
                return f"The average lifespan of a {breed} dog is {lifespan} years."
    return "No reliable lifespan information found. Please check with a veterinarian."
@tool
def extract_lifespan(text: str) -> Optional[str]:
    """
    Extracts the first occurrence of a lifespan range or single number in years.
    Args:
        text: The input text from search results.
    Returns:
        The extracted lifespan range or single value without the unit
        (e.g., '10-12' or '13'), or None when no lifespan is found.
    """
    # BUGFIX: the unit used to be optional ("(years|yrs)?"), so any bare 1- or
    # 2-digit number in the snippet (dates, list counts, page numbers) was
    # accepted as a lifespan. Requiring 'years'/'yrs' removes those false hits.
    # Inner groups are non-capturing so group(1) stays the full number/range.
    pattern = r"(\d{1,2}\s?(?:-|to)\s?\d{1,2}|\d{1,2})\s?(?:years|yrs)"
    match = re.search(pattern, text, re.IGNORECASE)
    if match:
        return match.group(1)
    return None
# --- Agent wiring: model, tools, prompts, UI ---------------------------------

# Tool the agent uses to emit its final answer back to the user.
final_answer = FinalAnswerTool()

# Inference endpoint serving the LLM. NOTE(review): model_id points at a
# dedicated HF endpoint URL — verify it is still live before deploying.
model = HfApiModel(
    max_tokens=2096,
    temperature=0.5,
    model_id='https://wxknx1kg971u7k1n.us-east-1.aws.endpoints.huggingface.cloud',
    custom_role_conversions=None,
)

# Import tool from Hugging Face Hub
# (trust_remote_code=True executes code from the Hub repo — trusted source only)
image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)

# System/planning prompt templates loaded from the Space's local prompts.yaml.
with open("prompts.yaml", 'r') as stream:
    prompt_templates = yaml.safe_load(stream)

agent = CodeAgent(
    model=model,
    tools=[final_answer, dog_average_longevity, extract_lifespan, image_generation_tool], # Now includes image generation
    max_steps=6,
    verbosity_level=1,
    grammar=None,
    planning_interval=None,
    name=None,
    description=None,
    prompt_templates=prompt_templates
)

# Launch the Gradio chat UI wrapping the agent (blocks until the server stops).
GradioUI(agent).launch()
|