from unittest.mock import AsyncMock, MagicMock

import pytest

from vsp.llm.llm_service import LLMService
from vsp.llm.prompt import Prompt
from vsp.llm.prompt_text import PromptText


@pytest.fixture
def mock_llm_service():
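    """LLMService stand-in whose invoke() resolves to a fixed string."""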
    service = AsyncMock(spec=LLMService)
    service.invoke.return_value = "Mocked response"
    return service


def test_prompt_initialization(mock_llm_service):
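    """Prompt keeps the user prompt it was constructed with."""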
    user_prompt = PromptText("Hello, {name}!")
    prompt = Prompt(mock_llm_service, user_prompt=user_prompt)
    assert prompt._user_prompt == user_prompt


def test_prompt_upsert_inputs(mock_llm_service):
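    """upsert_inputs fills the template placeholders in the user prompt."""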
    user_prompt = PromptText("Hello, {name}!")
    prompt = Prompt(mock_llm_service, user_prompt=user_prompt)
    prompt.upsert_inputs({"name": "Alice"})
    assert prompt._user_prompt.get_prompt() == "Hello, Alice!"


@pytest.mark.asyncio
async def test_prompt_evaluate(mock_llm_service):
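    """evaluate() forwards the rendered prompt and default parameters to the service."""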
    user_prompt = PromptText("Hello, {name}!")
    prompt = Prompt(mock_llm_service, user_prompt=user_prompt)
    prompt.upsert_inputs({"name": "Bob"})
    result = await prompt.evaluate()
    assert result == "Mocked response"
    mock_llm_service.invoke.assert_called_once_with(
        user_prompt="Hello, Bob!", system_prompt=None, partial_assistant_prompt=None, max_tokens=1000, temperature=0.0
    )


def test_prompt_missing_prompts():
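    """Constructing a Prompt without any prompt text raises ValueError."""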
    with pytest.raises(ValueError):
        Prompt(MagicMock())


@pytest.mark.asyncio
async def test_prompt_with_all_parameters(mock_llm_service):
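    """All prompt parts and overridden parameters are passed through to invoke()."""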
    user_prompt = PromptText("User: {user_input}")
    system_prompt = PromptText("System: {system_input}")
    assistant_prompt = PromptText("Assistant: {assistant_input}")
    prompt = Prompt(
        mock_llm_service,
        user_prompt=user_prompt,
        system_prompt=system_prompt,
        partial_assistant_prompt=assistant_prompt,
        max_tokens=500,
        temperature=0.7,
    )
    prompt.upsert_inputs({"user_input": "Hello", "system_input": "Be polite", "assistant_input": "Greetings"})
    await prompt.evaluate()
    mock_llm_service.invoke.assert_called_once_with(
        user_prompt="User: Hello",
        system_prompt="System: Be polite",
        partial_assistant_prompt="Assistant: Greetings",
        max_tokens=500,
        temperature=0.7,
    )
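

# Illustrative addition (not part of the original suite): the same templating check,
# parametrized over several names. It assumes only the API already exercised above
# (PromptText, Prompt, upsert_inputs, and _user_prompt.get_prompt()).
@pytest.mark.parametrize("name", ["Alice", "Bob", "Carol"])
def test_prompt_upsert_inputs_parametrized(mock_llm_service, name):
    user_prompt = PromptText("Hello, {name}!")
    prompt = Prompt(mock_llm_service, user_prompt=user_prompt)
    prompt.upsert_inputs({"name": name})
    assert prompt._user_prompt.get_prompt() == f"Hello, {name}!"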