|
import sys
import os
import asyncio

sys.path.insert(0, os.path.abspath('../..'))  # make the local litellm package importable

import pytest

import litellm
from litellm import completion

# The Langfuse callback expects LANGFUSE_PUBLIC_KEY / LANGFUSE_SECRET_KEY in the environment.
litellm.num_retries = 3  # retry transient provider failures
litellm.success_callback = ["langfuse"]  # log every successful call to Langfuse


def test_langfuse_logging_async():
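    """Ensure litellm.acompletion() calls are logged to Langfuse via the success callback."""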
|
    try:
        litellm.set_verbose = True

        async def _test_langfuse():
            return await litellm.acompletion(
                model="gpt-3.5-turbo",
                messages=[{"role": "user", "content": "This is a test"}],
                max_tokens=1000,
                temperature=0.7,
                timeout=5,
            )

        response = asyncio.run(_test_langfuse())
        print(f"response: {response}")
    except litellm.Timeout:
        pass  # hitting the 5s timeout is acceptable here, not a test failure
    except Exception as e:
        pytest.fail(f"An exception occurred - {e}")


def test_langfuse_logging():
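    """Ensure a basic Anthropic completion is logged to Langfuse, with langfuse-scoped metadata."""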
|
    try:
        response = completion(
            model="claude-instant-1.2",
            messages=[{"role": "user", "content": "Hi 👋 - i'm claude"}],
            max_tokens=10,
            temperature=0.2,
            metadata={"langfuse/key": "foo"},
        )
        print(response)
    except litellm.Timeout:
        pass
    except Exception as e:
        print(e)  # provider errors are printed but deliberately don't fail this test


# test_langfuse_logging()  # uncomment for a quick manual run; pytest collects the test automatically


def test_langfuse_logging_stream():
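    """Ensure streamed completions are logged once the stream has been fully consumed."""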
|
    try:
        litellm.set_verbose = True
        response = completion(
            model="anyscale/meta-llama/Llama-2-7b-chat-hf",
            messages=[{"role": "user", "content": "this is a streaming test for llama2 + langfuse"}],
            max_tokens=20,
            temperature=0.2,
            stream=True,
        )
        print(response)
        # consume the stream - the success callback only fires once the stream is exhausted
        for chunk in response:
            pass
    except litellm.Timeout:
        pass
    except Exception as e:
        print(e)  # provider errors are printed but deliberately don't fail this test


def test_langfuse_logging_custom_generation_name():
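    """Ensure a custom generation name (plus mixed metadata) is forwarded to Langfuse."""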
|
    try:
        litellm.set_verbose = True
        response = completion(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": "Hi 👋 - i'm claude"}],
            max_tokens=10,
            metadata={
                "generation_name": "test-generation",  # example name; this key is read by litellm's Langfuse logger
                "langfuse/foo": "bar",
                "langsmith/fizz": "buzz",
                "prompt_hash": "asdf98u0j9131123",
            },
        )
        print(response)
    except litellm.Timeout:
        pass
    except Exception as e:
        pytest.fail(f"An exception occurred - {e}")


# test_langfuse_logging_custom_generation_name()  # uncomment for a quick manual run


def test_langfuse_logging_function_calling():
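    """Ensure function-calling requests and responses are logged to Langfuse."""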
|
    function1 = [
        {
            "name": "get_current_weather",
            "description": "Get the current weather in a given location",
            "parameters": {
                "type": "object",
                "properties": {
                    "location": {
                        "type": "string",
                        "description": "The city and state, e.g. San Francisco, CA",
                    },
                    "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
                },
                "required": ["location"],
            },
        }
    ]
    try:
        response = completion(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": "what's the weather in boston"}],
            temperature=0.1,
            functions=function1,
        )
        print(response)
    except litellm.Timeout:
        pass
    except Exception as e:
        print(e)  # provider errors are printed but deliberately don't fail this test