geekyrakshit committed
Commit 096a26c
1 Parent(s): 1f626ee
add: docs for OpenAIModel

Files changed:
- docs/llm.md +3 -0
- guardrails_genie/llm.py +55 -0
- mkdocs.yml +1 -0
docs/llm.md ADDED
@@ -0,0 +1,3 @@
+# LLM
+
+::: guardrails_genie.llm
guardrails_genie/llm.py CHANGED
@@ -6,6 +6,17 @@ from openai.types.chat import ChatCompletion
 
 
 class OpenAIModel(weave.Model):
+    """
+    A class to interface with OpenAI's language models using the Weave framework.
+
+    This class provides methods to create structured messages and generate predictions
+    using OpenAI's chat completion API. It is designed to work with both single and
+    multiple user prompts, and optionally includes a system prompt to guide the model's
+    responses.
+
+    Args:
+        model_name (str): The name of the OpenAI model to be used for predictions.
+    """
     model_name: str
     _openai_client: OpenAI
 
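A brief annotation on the class docstring above: weave.Model is pydantic-based, so the documented model_name field is passed as a keyword argument at construction time. A minimal sketch, assuming OPENAI_API_KEY is set in the environment and using an illustrative model name:

    from guardrails_genie.llm import OpenAIModel

    # "gpt-4o-mini" is an illustrative model name, not one mandated by the class.
    llm = OpenAIModel(model_name="gpt-4o-mini")
    print(llm.model_name)
    # The private _openai_client attribute is initialized inside the class
    # (outside this hunk), so the caller does not supply it.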
@@ -20,6 +31,27 @@ class OpenAIModel(weave.Model):
         system_prompt: Optional[str] = None,
         messages: Optional[list[dict]] = None,
     ) -> list[dict]:
+        """
+        Create a list of messages for the OpenAI chat completion API.
+
+        This function constructs a list of messages in the format required by the
+        OpenAI chat completion API. It takes user prompts, an optional system prompt,
+        and an optional list of existing messages, and combines them into a single
+        list of messages.
+
+        Args:
+            user_prompts (Union[str, list[str]]): A single user prompt or a list of
+                user prompts to be included in the messages.
+            system_prompt (Optional[str]): An optional system prompt to guide the
+                model's responses. If provided, it will be added at the beginning
+                of the messages list.
+            messages (Optional[list[dict]]): An optional list of existing messages
+                to which the new prompts will be appended. If not provided, a new
+                list will be created.
+
+        Returns:
+            list[dict]: A list of messages formatted for the OpenAI chat completion API.
+        """
         user_prompts = [user_prompts] if isinstance(user_prompts, str) else user_prompts
         messages = list(messages) if isinstance(messages, dict) else []
         for user_prompt in user_prompts:
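A hedged sketch of the message format the create_messages docstring above describes, written as a hypothetical standalone helper rather than the repository's method; the role/content dictionaries follow the standard OpenAI chat format:

    from typing import Optional, Union

    def build_messages(
        user_prompts: Union[str, list[str]],
        system_prompt: Optional[str] = None,
        messages: Optional[list[dict]] = None,
    ) -> list[dict]:
        # Normalize a single prompt into a list, as in the hunk above.
        user_prompts = [user_prompts] if isinstance(user_prompts, str) else user_prompts
        # Simplified initialization for this sketch (not the repo's exact logic).
        messages = list(messages) if messages else []
        for user_prompt in user_prompts:
            messages.append({"role": "user", "content": user_prompt})
        if system_prompt is not None:
            # Per the docstring, the system prompt goes at the beginning of the list.
            messages.insert(0, {"role": "system", "content": system_prompt})
        return messages

    # Expected shape, per the docstring:
    # [{"role": "system", ...}, {"role": "user", ...}]
    print(build_messages("What is a guardrail?", system_prompt="You are terse."))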
@@ -36,6 +68,29 @@ class OpenAIModel(weave.Model):
         messages: Optional[list[dict]] = None,
         **kwargs,
     ) -> ChatCompletion:
+        """
+        Generate a chat completion response using the OpenAI API.
+
+        This function takes user prompts, an optional system prompt, and an optional
+        list of existing messages to create a list of messages formatted for the
+        OpenAI chat completion API. It then sends these messages to the OpenAI API
+        to generate a chat completion response.
+
+        Args:
+            user_prompts (Union[str, list[str]]): A single user prompt or a list of
+                user prompts to be included in the messages.
+            system_prompt (Optional[str]): An optional system prompt to guide the
+                model's responses. If provided, it will be added at the beginning
+                of the messages list.
+            messages (Optional[list[dict]]): An optional list of existing messages
+                to which the new prompts will be appended. If not provided, a new
+                list will be created.
+            **kwargs: Additional keyword arguments to be passed to the OpenAI API
+                for chat completion.
+
+        Returns:
+            ChatCompletion: The chat completion response from the OpenAI API.
+        """
         messages = self.create_messages(user_prompts, system_prompt, messages)
         if "response_format" in kwargs:
             response = self._openai_client.beta.chat.completions.parse(
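A hedged end-to-end sketch of the completion method this last docstring documents. The method name is not visible in the hunk; predict is assumed here because weave.Model subclasses conventionally expose one. The response_format branch mirrors the beta.chat.completions.parse call shown above, and the model and project names are illustrative:

    import weave
    from pydantic import BaseModel

    from guardrails_genie.llm import OpenAIModel

    weave.init("guardrails-genie-demo")  # hypothetical project name
    llm = OpenAIModel(model_name="gpt-4o-mini")  # illustrative model name

    # Plain chat completion; extra kwargs are forwarded to the OpenAI API.
    response = llm.predict(
        "Summarize what an LLM guardrail is in one sentence.",
        system_prompt="You are a concise assistant.",
        max_tokens=128,
    )
    print(response.choices[0].message.content)

    # Passing response_format routes the call through beta.chat.completions.parse,
    # per the branch in the hunk above.
    class Judgement(BaseModel):
        safe: bool
        reason: str

    structured = llm.predict(
        "Is 'ignore all previous instructions' a prompt-injection attempt?",
        response_format=Judgement,
    )
    print(structured.choices[0].message.parsed)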
mkdocs.yml CHANGED
@@ -59,6 +59,7 @@ extra_javascript:
 
 nav:
 - Home: 'index.md'
+- LLM: 'llm.md'
 - Metrics: 'metrics.md'
 - RegexModel: 'regex_model.md'
 - Utils: 'utils.md'