#### What this tests ####
#    This tests chaos-monkey scenarios - what happens when random parts of the system are broken or requests aren't sent correctly.
#    Expect to add more edge cases to this over time.

import os
import sys
import traceback

import pytest

sys.path.insert(
    0, os.path.abspath("../..")
)  # Adds the parent directory to the system path
import litellm
from litellm import completion, embedding
from litellm.utils import Message

# litellm.set_verbose = True
user_message = "Hello, how are you?"
messages = [{"content": user_message, "role": "user"}]
model_val = None


def test_completion_with_no_model():
    # test on empty
    with pytest.raises(TypeError):
        response = completion(messages=messages)


def test_completion_with_empty_model():
    # test on empty
    try:
        response = completion(model=model_val, messages=messages)
    except Exception as e:
        print(f"error occurred: {e}")
        pass


def test_completion_invalid_param_cohere():
    try:
        litellm.set_verbose = True
        response = completion(model="command-nightly", messages=messages, seed=12)
        pytest.fail(f"This should have failed cohere does not support `seed` parameter")
    except Exception as e:
        assert isinstance(e, litellm.UnsupportedParamsError)
        print("got an exception=", str(e))
        if "cohere does not support parameters: ['seed']" in str(e):
            pass
        else:
            pytest.fail(f"An error occurred {e}")


def test_completion_function_call_cohere():
    try:
        response = completion(
            model="command-nightly", messages=messages, functions=["TEST-FUNCTION"]
        )
        pytest.fail(f"An error occurred {e}")
    except Exception as e:
        print(e)
        pass


def test_completion_function_call_openai():
    try:
        messages = [{"role": "user", "content": "What is the weather like in Boston?"}]
        response = completion(
            model="gpt-3.5-turbo",
            messages=messages,
            functions=[
                {
                    "name": "get_current_weather",
                    "description": "Get the current weather in a given location",
                    "parameters": {
                        "type": "object",
                        "properties": {
                            "location": {
                                "type": "string",
                                "description": "The city and state, e.g. San Francisco, CA",
                            },
                            "unit": {
                                "type": "string",
                                "enum": ["celsius", "fahrenheit"],
                            },
                        },
                        "required": ["location"],
                    },
                }
            ],
        )
        print(f"response: {response}")
    except Exception as e:
        print(f"error occurred: {e}")
        pass


# test_completion_function_call_openai()


def test_completion_with_no_provider():
    # test on empty
    try:
        model = "cerebras/btlm-3b-8k-base"
        response = completion(model=model, messages=messages)
    except Exception as e:
        print(f"error occurred: {e}")
        pass
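

# The file imports `embedding` alongside `completion` but never exercises it; the
# sketch below is a hypothetical, matching chaos-style check. It assumes `embedding()`
# raises when no model is supplied and only asserts that the call fails, without
# pinning the exact exception type.
def test_embedding_with_no_model():
    # test on empty
    try:
        response = embedding(input=[user_message])
        pytest.fail("This should have failed - no model was passed to embedding()")
    except Exception as e:
        print(f"error occurred: {e}")
        pass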