geekyrakshit committed
Commit b850722
1 Parent(s): 34cc174

initial commit

.gitignore CHANGED
@@ -160,3 +160,6 @@ cython_debug/
  # and can be added to the global gitignore or merged into this file. For a more nuclear
  # option (not recommended) you can uncomment the following to ignore the entire idea folder.
  #.idea/
+
+ cursor_prompts/
+ uv.lock
app.py ADDED
@@ -0,0 +1,38 @@
+ import streamlit as st
+ import weave
+ from dotenv import load_dotenv
+
+ from guardrails_genie.llm import OpenAIModel
+
+ load_dotenv()
+ weave.init(project_name="guardrails-genie")
+
+ st.title("Echo Bot")
+
+ # Initialize chat history
+ if "messages" not in st.session_state:
+     st.session_state.messages = []
+
+ llm_model = OpenAIModel(model_name="gpt-4o-mini")
+
+ # Display chat messages from history on app rerun
+ for message in st.session_state.messages:
+     with st.chat_message(message["role"]):
+         st.markdown(message["content"])
+
+ # React to user input
+ if prompt := st.chat_input("What is up?"):
+     # Display user message in chat message container
+     st.chat_message("user").markdown(prompt)
+     # Add user message to chat history
+     st.session_state.messages.append({"role": "user", "content": prompt})
+
+     # Pass the history without the just-appended prompt, since predict()
+     # appends the prompt itself via create_messages()
+     response = llm_model.predict(prompt, messages=st.session_state.messages[:-1])
+     response = response.choices[0].message.content
+     # Display assistant response in chat message container
+     with st.chat_message("assistant"):
+         st.markdown(response)
+     # Add assistant response to chat history
+     st.session_state.messages.append({"role": "assistant", "content": response})
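Note: app.py assumes its credentials are already in place at startup. load_dotenv() reads a local .env file, the OpenAI client looks up the standard OPENAI_API_KEY environment variable, and weave.init is assumed to need W&B credentials (WANDB_API_KEY or a prior "wandb login"). A minimal sketch of that precondition check:

    # Sketch: verify the environment app.py depends on. The variable names
    # are the libraries' standard ones, not something defined in this commit.
    import os

    from dotenv import load_dotenv

    load_dotenv()  # reads a local .env file, as app.py does
    for var in ("OPENAI_API_KEY", "WANDB_API_KEY"):
        if not os.environ.get(var):
            print(f"warning: {var} is not set")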
guardrails_genie/__init__.py ADDED
File without changes
guardrails_genie/llm.py ADDED
@@ -0,0 +1,44 @@
+ from typing import Optional, Union
+
+ import weave
+ from openai import OpenAI
+ from openai.types.chat import ChatCompletion
+
+
+ class OpenAIModel(weave.Model):
+     model_name: str
+     _openai_client: OpenAI
+
+     def __init__(self, model_name: str = "gpt-4o") -> None:
+         super().__init__(model_name=model_name)
+         self._openai_client = OpenAI()
+
+     @weave.op()
+     def create_messages(
+         self,
+         user_prompts: Union[str, list[str]],
+         system_prompt: Optional[str] = None,
+         messages: Optional[list[dict]] = None,
+     ) -> list[dict]:
+         user_prompts = [user_prompts] if isinstance(user_prompts, str) else user_prompts
+         # Start from a copy of the prior history when one is provided
+         messages = list(messages) if isinstance(messages, list) else []
+         for user_prompt in user_prompts:
+             messages.append({"role": "user", "content": user_prompt})
+         if system_prompt is not None:
+             messages = [{"role": "system", "content": system_prompt}] + messages
+         return messages
+
+     @weave.op()
+     def predict(
+         self,
+         user_prompts: Union[str, list[str]],
+         system_prompt: Optional[str] = None,
+         messages: Optional[list[dict]] = None,
+         **kwargs,
+     ) -> ChatCompletion:
+         messages = self.create_messages(user_prompts, system_prompt, messages)
+         response = self._openai_client.chat.completions.create(
+             model=self.model_name, messages=messages, **kwargs
+         )
+         return response
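OpenAIModel can also be exercised outside Streamlit: predict accepts a single prompt or a list of prompts, an optional system prompt, and an optional prior message history, and returns the raw ChatCompletion. A usage sketch (the prompts and history here are illustrative, not from the commit):

    import weave
    from guardrails_genie.llm import OpenAIModel

    weave.init(project_name="guardrails-genie")  # traces the @weave.op calls
    model = OpenAIModel(model_name="gpt-4o-mini")
    history = [
        {"role": "user", "content": "Hi!"},
        {"role": "assistant", "content": "Hello! How can I help?"},
    ]
    response = model.predict(
        "Summarize our conversation so far.",
        system_prompt="You are a concise assistant.",
        messages=history,
    )
    print(response.choices[0].message.content)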
pyproject.toml ADDED
@@ -0,0 +1,22 @@
+ [project]
+ name = "guardrails-genie"
+ version = "0.0.1"
+ description = ""
+ readme = "README.md"
+ requires-python = ">=3.10"
+ dependencies = [
+     "google-generativeai>=0.8.3",
+     "openai>=1.52.2",
+     "isort>=5.13.2",
+     "black>=24.10.0",
+     "ruff>=0.6.9",
+     "pip>=24.2",
+     "uv>=0.4.20",
+     "weave>=0.51.19",
+     "streamlit>=1.40.1",
+     "python-dotenv>=1.0.1",
+     "watchdog>=6.0.0",
+ ]
+
+ [tool.setuptools]
+ py-modules = ["guardrails_genie"]
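With uv pinned as a dependency and uv.lock ignored in the .gitignore change above, the likely workflow (an assumption, not stated in the commit) is "uv sync" to resolve and install the dependencies, followed by "streamlit run app.py" to launch the chat app.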