import openai

# Point the OpenAI client at the LiteLLM proxy; the proxy handles provider auth,
# so any placeholder API key works here.
client = openai.OpenAI(
    api_key="anything",
    base_url="http://0.0.0.0:8000"
)

# Request is sent to the model set on the litellm proxy, `litellm --model`
response = client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[
        {
            "role": "user",
            "content": "this is a test request, write a short poem"
        }
    ]
)

print(response)
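
# A minimal streaming sketch, assuming the model behind the proxy supports
# streaming; it reuses the same placeholder endpoint and model name as above.
stream = client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[
        {
            "role": "user",
            "content": "this is a test request, write a short poem"
        }
    ],
    stream=True
)

for chunk in stream:
    # Each chunk carries an incremental delta; content can be None on the final chunk.
    if chunk.choices and chunk.choices[0].delta.content is not None:
        print(chunk.choices[0].delta.content, end="")
print()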