LordFarquaad42 committed on
Commit 1487cce
1 Parent(s): 580f382

changed layout

Files changed (4)
  1. .gitignore +2 -1
  2. app.py +62 -105
  3. database.py +16 -0
  4. params.py +67 -0
.gitignore CHANGED
@@ -1 +1,2 @@
- env/
+ env/
+ __pycache__/

app.py CHANGED
@@ -1,114 +1,71 @@
import streamlit as st
- import chromadb
- from chromadb.utils import embedding_functions
- from sentence_transformers import SentenceTransformer
from openai import OpenAI
+ from params import params
+ from database import get_client

- # CONSTANTS
- client = chromadb.PersistentClient(path="./chromadb_linux/")
- MODEL_NAME: str = "mixedbread-ai/mxbai-embed-large-v1" # ~ 0.5 gb
- COLLECTION_NAME: str = "scheme"
- EMBEDDING_FUNC = embedding_functions.SentenceTransformerEmbeddingFunction(
-     model_name=MODEL_NAME
- )
- schemer = client.get_collection(
-     name=COLLECTION_NAME,
-     embedding_function=EMBEDDING_FUNC,
- )
- DATA_AVAL: bool = schemer.count() > 0
+ CLIENT = get_client()
APP_NAME: str = "Groove-GPT"
history = []
+ st.set_page_config(layout="wide")

# INFO
st.title(APP_NAME)
- st.header("What is Groovy-GPT?")
- st.write(
-     "Groovy-GPT is a RAG (Retrieval-Augmented Generation) model that uses ChromaDB to retrieve relevant documents and then uses OpenAI's models to generate a response."
- )
- st.write(
-     "The model is trained on the MIT Scheme textbook and a handful of Discrete Math and Paradigms related content that Professor Troeger posted"
- )
- st.write("Data Avaliable: ", DATA_AVAL)

- # INPUTS
- user_question: str = st.text_area("Enter your groovy questions here")
-
- remember_chat_history = st.toggle("Remember This Chat's History")
-
- temperature = st.slider(
-     label="Creativity of Model", min_value=0.0, max_value=2.0, value=0.8
- )
- st.markdown("*High creativity will make it go crazy - keep it low*")
-
- num_samples = st.slider(
-     label="Amount of References to Give to Model", min_value=10, max_value=100, value=10
- )
- st.markdown(
-     "*High amount will make it slow and expensive (and may not be relevant) - keep it low*"
- )
-
- access_key: str = st.text_input("Enter your gpt key here", type="password")
- st.markdown(
-     "*For more information about how to get an access key, read [this article](https://platform.openai.com/api-keys). Make sure it has money in it ☠️*",
-     unsafe_allow_html=True,
- )
-
- gpt_type: str = st.selectbox(
-     label="Choose GPT Type",
-     options=[
-         "gpt-3.5-turbo",
-         "gpt-3.5-turbo-1106",
-         "gpt-3.5-turbo-0125",
-         "gpt-4-32k-0613",
-         "gpt-4-0613",
-         "gpt-4-0125-preview",
-     ],
-     index=0,
- )
- st.markdown(
-     "*For more information about GPT types, read [this article](https://platform.openai.com/docs/models).*",
-     unsafe_allow_html=True,
- )
-
- st.divider()
-
- # ON BUTTON CLICK
- if st.button("Start Scheming") & (access_key != "") & (user_question != ""):
-     openai_client = OpenAI(api_key=access_key)
-
-     with st.spinner("Loading..."):
-         # Perform the Chromadb query.
-         results = schemer.query(
-             query_texts=[user_question], n_results=num_samples, include=["documents"]
-         )
-         documents = results["documents"]
-         response = openai_client.chat.completions.create(
-             model="gpt-3.5-turbo",
-             messages=[
-                 {
-                     "role": "system",
-                     "content": "You are an expert in functional programming in R5RS, with great knowledge on programming paradigms. You wish to teach the user everything you know about programming paradigms in R5RS - so you explain everything thoroughly. Surround Latex equations in dollar signs as such Inline equation: $equation$ & Display equation: $$equation$$. You will focus your examples to work exclusively in interative and recursive apporaches",
-                 },
-                 {"role": "user", "content": user_question},
-                 {"role": "assistant", "content": str(documents)},
-                 {"role": "user", "content": f"Conversation History: {history}"},
-             ],
-             temperature=temperature,
-             stream=True,
-         )
-
-         # history.append({user_question : response.choices[0].message.content} if remember_chat_history else {})
-
-         st.header("The Mega Schemer Says ...")
-
-         text_placeholder = st.empty()
-
-         content = ""
-         for i, chunk in enumerate(response):
-             if chunk.choices[0].delta.content is not None:
-                 # Append the chunk content to the string
-                 content += chunk.choices[0].delta.content
-
-                 text_placeholder.markdown(content)
- else:
-     st.write("Please provide an input and (valid) API key")
+ l_col, r_col = st.columns((3, 1))
+
+ # param column
+ with r_col:
+     (
+         submit_button,
+         remember_chat_history,
+         temperature,
+         num_samples,
+         access_key,
+         gpt_type,
+     ) = params()
+
+ # input & response
+ with l_col:
+     user_question: str = st.text_input("Enter your groovy questions here")
+
+     # ON BUTTON CLICK
+     if submit_button & (access_key != "") & (user_question != ""):
+         openai_client = OpenAI(api_key=access_key)
+
+         with st.spinner("Loading..."):
+             # Perform the Chromadb query.
+             results = CLIENT.query(
+                 query_texts=[user_question],
+                 n_results=num_samples,
+                 include=["documents"],
+             )
+             documents = results["documents"]
+             response = openai_client.chat.completions.create(
+                 model="gpt-3.5-turbo",
+                 messages=[
+                     {
+                         "role": "system",
+                         "content": "You are an expert in functional programming in R5RS, with great knowledge on programming paradigms. You wish to teach the user everything you know about programming paradigms in R5RS - so you explain everything thoroughly. Surround Latex equations in dollar signs as such Inline equation: $equation$ & Display equation: $$equation$$.",
+                     },
+                     {"role": "user", "content": user_question},
+                     {"role": "assistant", "content": str(documents)},
+                     {"role": "user", "content": f"Conversation History: {history}"},
+                 ],
+                 temperature=temperature,
+                 stream=True,
+             )
+
+             st.header("The Super Duper Schemer Says ...")
+             text_placeholder = st.empty()
+
+             content = ""
+             for i, chunk in enumerate(response):
+                 if chunk.choices[0].delta.content is not None:
+                     # Append the chunk content to the string
+                     content += chunk.choices[0].delta.content
+
+                     text_placeholder.markdown(content)
+
+             history.append({user_question: content} if remember_chat_history else {})
+     else:
+         st.write("Please provide an input and (valid) API key")

database.py ADDED
@@ -0,0 +1,16 @@
+ import chromadb
+ from chromadb.utils import embedding_functions
+
+ def get_client():
+     # CONSTANTS
+     client = chromadb.PersistentClient(path="./chromadb_linux/")
+     MODEL_NAME: str = "mixedbread-ai/mxbai-embed-large-v1" # ~ 0.5 gb
+     COLLECTION_NAME: str = "scheme"
+     EMBEDDING_FUNC = embedding_functions.SentenceTransformerEmbeddingFunction(
+         model_name=MODEL_NAME
+     )
+     schemer = client.get_collection(
+         name=COLLECTION_NAME,
+         embedding_function=EMBEDDING_FUNC,
+     )
+     return schemer
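
For context, a minimal usage sketch of the new database.py helper. It assumes chromadb and sentence-transformers are installed and that the "scheme" collection already exists under ./chromadb_linux/; the example question and n_results value are illustrative only, and the query call mirrors the one in app.py above.

from database import get_client

# Get the persistent "scheme" collection and retrieve documents similar to a question.
collection = get_client()
results = collection.query(
    query_texts=["What is tail recursion in R5RS?"],  # hypothetical example question
    n_results=5,
    include=["documents"],
)
print(results["documents"][0])  # documents for the first (and only) query text
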
params.py ADDED
@@ -0,0 +1,67 @@
+ import streamlit as st
+
+
+ def params():
+     submit_button = st.button("Start Scheming")
+
+     # API Key
+     access_key: str = st.text_input("Enter your gpt key here", type="password")
+     st.markdown(
+         "*For more information about how to get an access key, read [this article](https://platform.openai.com/api-keys). Make sure it has money in it ☠️*",
+         unsafe_allow_html=True,
+     )
+
+     # Chat History
+     remember_chat_history = st.toggle("Remember This Chat's History")
+
+     # Temperature
+     temperature = st.slider(
+         label="Creativity of Model", min_value=0.0, max_value=2.0, value=0.8
+     )
+     st.markdown("*High creativity will make it go crazy - keep it low*")
+
+     # Number of Samples
+     num_samples = st.slider(
+         label="Amount of References to Give to Model",
+         min_value=1,
+         max_value=20,
+         value=10,
+     )
+     st.markdown(
+         "*High amount will make it slow and expensive (and may not be relevant) - keep it low*"
+     )
+
+     # GPT Type
+     gpt_type: str = st.selectbox(
+         label="Choose GPT Type",
+         options=[
+             "gpt-3.5-turbo",
+             "gpt-3.5-turbo-1106",
+             "gpt-3.5-turbo-0125",
+             "gpt-4-32k-0613",
+             "gpt-4-0125-preview",
+             "gpt-4-turbo",
+         ],
+         index=0,
+     )
+     st.markdown(
+         "*For more information about GPT types, read [this article](https://platform.openai.com/docs/models).*",
+         unsafe_allow_html=True,
+     )
+
+     st.subheader("What is Groovy-GPT?")
+     st.write(
+         "Groovy-GPT is a RAG (Retrieval-Augmented Generation) model that uses ChromaDB to retrieve relevant documents and then uses OpenAI's models to generate a response."
+     )
+     st.write(
+         "The model is trained on the MIT Scheme textbook and a handful of Discrete Math and Paradigms related content that Professor Troeger posted"
+     )
+
+     return (
+         submit_button,
+         remember_chat_history,
+         temperature,
+         num_samples,
+         access_key,
+         gpt_type,
+     )
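
Similarly, a minimal sketch of consuming the params() tuple; the st.sidebar placement is only an assumption for illustration (app.py above renders it in the right column of st.columns((3, 1)) instead), and the inline comments note which widget each value comes from.

import streamlit as st
from params import params

with st.sidebar:  # assumed placement for this sketch
    (
        submit_button,          # bool from st.button
        remember_chat_history,  # bool from st.toggle
        temperature,            # float from st.slider
        num_samples,            # int from st.slider
        access_key,             # str from st.text_input
        gpt_type,               # str from st.selectbox
    ) = params()
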