Spaces:
Running
Running
✨ feat: update model to LiteLLM and add litellm dependency
Browse files- app.py +19 -9
- requirements.txt +1 -0
app.py
CHANGED
@@ -1,8 +1,9 @@
|
|
1 |
-
from smolagents import CodeAgent,DuckDuckGoSearchTool, HfApiModel,load_tool,tool
|
2 |
import datetime
|
3 |
import requests
|
4 |
import pytz
|
5 |
import yaml
|
|
|
6 |
from tools.final_answer import FinalAnswerTool
|
7 |
from tools.visit_webpage import VisitWebpageTool
|
8 |
from tools.web_search import DuckDuckGoSearchTool
|
@@ -13,7 +14,7 @@ from Gradio_UI import GradioUI
|
|
13 |
@tool
|
14 |
def my_custom_tool(arg1:str, arg2:int)-> str: #it's important to specify the return type
|
15 |
#Keep this format for the description / args / args description but feel free to modify the tool
|
16 |
-
"""A tool that does nothing yet
|
17 |
Args:
|
18 |
arg1: the first argument
|
19 |
arg2: the second argument
|
@@ -39,16 +40,25 @@ def get_current_time_in_timezone(timezone: str) -> str:
|
|
39 |
visit_webpage = VisitWebpageTool()
|
40 |
web_search = DuckDuckGoSearchTool()
|
41 |
final_answer = FinalAnswerTool()
|
42 |
-
|
43 |
-
|
44 |
-
|
45 |
-
|
46 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
47 |
)
|
48 |
|
49 |
with open("prompts.yaml", 'r') as stream:
|
50 |
prompt_templates = yaml.safe_load(stream)
|
51 |
-
|
52 |
agent = CodeAgent(
|
53 |
model=model,
|
54 |
tools=[visit_webpage, web_search, final_answer], ## add your tools here (don't remove final answer)
|
@@ -61,4 +71,4 @@ agent = CodeAgent(
|
|
61 |
prompt_templates=prompt_templates
|
62 |
)
|
63 |
|
64 |
-
GradioUI(agent).launch()
|
|
|
1 |
+
from smolagents import CodeAgent,DuckDuckGoSearchTool, HfApiModel, load_tool, tool, LiteLLMModel
|
2 |
import datetime
|
3 |
import requests
|
4 |
import pytz
|
5 |
import yaml
|
6 |
+
import os
|
7 |
from tools.final_answer import FinalAnswerTool
|
8 |
from tools.visit_webpage import VisitWebpageTool
|
9 |
from tools.web_search import DuckDuckGoSearchTool
|
|
|
14 |
@tool
|
15 |
def my_custom_tool(arg1:str, arg2:int)-> str: #it's important to specify the return type
|
16 |
#Keep this format for the description / args / args description but feel free to modify the tool
|
17 |
+
"""A tool that does nothing yet
|
18 |
Args:
|
19 |
arg1: the first argument
|
20 |
arg2: the second argument
|
|
|
40 |
visit_webpage = VisitWebpageTool()
|
41 |
web_search = DuckDuckGoSearchTool()
|
42 |
final_answer = FinalAnswerTool()
|
43 |
+
|
44 |
+
# Hugging Face is getting the hug of death, so let's use LiteLLM for now
|
45 |
+
# model = HfApiModel(
|
46 |
+
# max_tokens=2096,
|
47 |
+
# temperature=0.5,
|
48 |
+
# model_id='Qwen/Qwen2.5-Coder-32B-Instruct',
|
49 |
+
# custom_role_conversions=None,
|
50 |
+
# )
|
51 |
+
|
52 |
+
model = LiteLLMModel(
|
53 |
+
model_id="gemini/gemini-2.0-flash-exp",
|
54 |
+
max_tokens=2096,
|
55 |
+
temperature=0.6,
|
56 |
+
api_key=os.getenv("LITELLM_API_KEY")
|
57 |
)
|
58 |
|
59 |
with open("prompts.yaml", 'r') as stream:
|
60 |
prompt_templates = yaml.safe_load(stream)
|
61 |
+
|
62 |
agent = CodeAgent(
|
63 |
model=model,
|
64 |
tools=[visit_webpage, web_search, final_answer], ## add your tools here (don't remove final answer)
|
|
|
71 |
prompt_templates=prompt_templates
|
72 |
)
|
73 |
|
74 |
+
GradioUI(agent).launch()
|
requirements.txt
CHANGED
@@ -3,3 +3,4 @@ smolagents
|
|
3 |
requests
|
4 |
duckduckgo_search
|
5 |
pandas
|
|
|
|
3 |
requests
|
4 |
duckduckgo_search
|
5 |
pandas
|
6 |
+
litellm
|