File size: 1,880 Bytes
cbddf51
38b4c24
 
7a6bda4
 
cbddf51
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
38b4c24
cbddf51
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
38b4c24
 
0dd2166
38b4c24
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
from smolagents.tools import Tool
from anthropic import Anthropic, HUMAN_PROMPT, AI_PROMPT
import os
from dotenv import load_dotenv
load_dotenv()  # Loads variables from .env into environment
class MoodToNeedTool(Tool):
    """
    A tool that converts user mood descriptions into vacation needs using an LLM.

    Attributes:
        model: A callable language model used to generate the output.
    """

    # smolagents Tool metadata: how the agent discovers and calls this tool.
    name = "MoodToNeed"
    description = "Converts user mood into a travel-related need."
    inputs = {
        "mood": {"type": "string", "description": "User's mood as text"},
    }
    output_type = "string"

    def __init__(self, model: callable) -> None:
        """
        Args:
            model: A callable language model with a __call__(str) -> str interface.
        """
        super().__init__()
        self.model = model

    def forward(self, mood: str) -> str:
        """
        Generates a vacation need from a user mood string.

        Args:
            mood: A string describing the user's emotional state.

        Returns:
            A short string describing the travel-related need.
        """
        # Few-shot prompt: instruction, one worked example, then the actual mood.
        # (The original interpolated the mood a second time *before* the example,
        # which broke the instruction -> example -> input structure.)
        prompt = (
            "Given the user's mood, suggest a travel need.\n"
            "Return only the need, no explanation.\n"
            "Example:\n"
            'Mood: "I am exhausted" → Need: "A calm wellness retreat"\n'
            f'Mood: "{mood}"\n'
            "Need:"
        )
        response = self.model(prompt)
        # Model output may carry leading/trailing whitespace or a newline.
        return response.strip()

# Module-level Anthropic API client, keyed from the environment
# (populated by the load_dotenv() call at import time above).
# NOTE(review): the env var name "ANTROPIC_KEY" is missing an "H" — likely a
# typo for ANTHROPIC_API_KEY. Not renamed here because existing .env files
# depend on it; confirm against the deployment config before changing.
client = Anthropic(api_key=os.getenv("ANTROPIC_KEY"))

def claude_mood_to_need_model(prompt: str) -> str:
    """Send *prompt* to Claude and return the model's text reply.

    Args:
        prompt: The fully rendered prompt string to submit as a single
            user message.

    Returns:
        The text of the first content block in Claude's response.
    """
    # Build the request once, then splat it into the Messages API call.
    request = {
        "model": "claude-3-opus-20240229",
        "max_tokens": 1024,
        "temperature": 0.7,
        "messages": [{"role": "user", "content": prompt}],
    }
    reply = client.messages.create(**request)
    return reply.content[0].text