File size: 2,207 Bytes
0abd329
cbbbc40
 
 
4d2b873
cbbbc40
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
model=HfApiModel('https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'),

Alternative: how to use a local Ollama setup

https://discord.com/channels/879548962464493619/1349138711643816006/1349138711643816006
I just want to share my solution in case it is useful.

1) I ran the qwen2.5 model locally. How?
2) Download ollama from https://ollama.com/download
3) In terminal, once Ollama is installed run command: ollama pull qwen2.5:7b (some useful info: https://ollama.com/library/qwen2.5)
4) pip install smolagents, ollama
5) See the script attached. The OllamaModel class was copied from another conversation in our community.

from smolagents import CodeAgent, DuckDuckGoSearchTool, FinalAnswerTool, HfApiModel, Tool, tool, VisitWebpageTool
import ollama

@tool
def suggest_menu(occasion: str) -> str:
    """
    Suggests a menu based on the occasion.
    Args:
        occasion: The type of occasion for the party.
    """
    # Dispatch table instead of an if/elif chain; unknown occasions fall
    # through to the generic butler menu.
    menu_by_occasion = {
        "casual": "Pizza, snacks, and drinks.",
        "formal": "3-course dinner with wine and dessert.",
        "superhero": "Buffet with high-energy and healthy food.",
    }
    return menu_by_occasion.get(occasion, "Custom menu for the butler.")


class OllamaModel:
    """Minimal smolagents-compatible model adapter backed by a local Ollama server.

    Flattens a smolagents chat-message list into a single prompt string and
    forwards it to ``ollama.chat`` as one user message.
    """

    def __init__(self, model_name: str):
        # Name of the locally pulled Ollama model, e.g. "qwen2.5".
        self.model_name = model_name

    def __call__(self, prompt, stop_sequences=("Task",)) -> str:
        """Send the flattened prompt to Ollama and return its reply.

        Args:
            prompt: List of message dicts shaped like
                ``{'role': ..., 'content': [{'type': 'text', 'text': ...}, ...]}``
                (as produced by smolagents).
            stop_sequences: Kept for interface compatibility but currently
                unused — Ollama is called without stop sequences. Default is a
                tuple rather than a list to avoid the mutable-default-argument
                pitfall.

        Returns:
            The response message object from ``ollama.chat`` (annotated -> str
            upstream; NOTE(review): ``response.message`` looks like an object,
            not a plain string — confirm against the ollama client version).
        """
        # 'system' and 'user' messages are flattened identically, so handle
        # both roles in a single pass (the original duplicated this loop).
        prompt_text = "".join(
            content['text']
            for item in prompt
            if item['role'] in ('system', 'user')
            for content in item['content']
            if content['type'] == 'text'
        )
        # Non-streaming call so the full response arrives in one object.
        response = ollama.chat(
            model=self.model_name,
            messages=[{"role": "user", "content": prompt_text}],
            stream=False,
        )
        return response.message

# Instantiate the local-Ollama-backed model; pass this as model= when
# constructing the CodeAgent. Requires `ollama pull qwen2.5` to have been run.
ollama_model = OllamaModel(model_name="qwen2.5")