LordFarquaad42 committed on
Commit
580f382
1 Parent(s): 3c398c2

llm data now streams in

Files changed (1)
  1. app.py +64 -26
app.py CHANGED
@@ -6,9 +6,11 @@ from openai import OpenAI
 
 # CONSTANTS
 client = chromadb.PersistentClient(path="./chromadb_linux/")
-MODEL_NAME: str = "mixedbread-ai/mxbai-embed-large-v1" # ~ 0.5 gb
+MODEL_NAME: str = "mixedbread-ai/mxbai-embed-large-v1"  # ~ 0.5 gb
 COLLECTION_NAME: str = "scheme"
-EMBEDDING_FUNC = embedding_functions.SentenceTransformerEmbeddingFunction(model_name=MODEL_NAME)
+EMBEDDING_FUNC = embedding_functions.SentenceTransformerEmbeddingFunction(
+    model_name=MODEL_NAME
+)
 schemer = client.get_collection(
     name=COLLECTION_NAME,
     embedding_function=EMBEDDING_FUNC,
@@ -17,11 +19,15 @@ DATA_AVAL: bool = schemer.count() > 0
 APP_NAME: str = "Groove-GPT"
 history = []
 
-# INFO
+# INFO
 st.title(APP_NAME)
 st.header("What is Groovy-GPT?")
-st.write("Groovy-GPT is a RAG (Retrieval-Augmented Generation) model that uses ChromaDB to retrieve relevant documents and then uses OpenAI's models to generate a response.")
-st.write("The model is trained on the MIT Scheme textbook and a handful of Discrete Math and Paradigms related content that Professor Troeger posted")
+st.write(
+    "Groovy-GPT is a RAG (Retrieval-Augmented Generation) model that uses ChromaDB to retrieve relevant documents and then uses OpenAI's models to generate a response."
+)
+st.write(
+    "The model is trained on the MIT Scheme textbook and a handful of Discrete Math and Paradigms related content that Professor Troeger posted"
+)
 st.write("Data Avaliable: ", DATA_AVAL)
 
 # INPUTS
@@ -29,48 +35,80 @@ user_question: str = st.text_area("Enter your groovy questions here")
 
 remember_chat_history = st.toggle("Remember This Chat's History")
 
-temperature = st.slider(label="Creativity of Model", min_value=0.0, max_value=2.0, value=0.8)
+temperature = st.slider(
+    label="Creativity of Model", min_value=0.0, max_value=2.0, value=0.8
+)
 st.markdown("*High creativity will make it go crazy - keep it low*")
 
-num_samples = st.slider(label="Amount of References to Give to Model", min_value=10, max_value=100, value=10)
-st.markdown("*High amount will make it slow and expensive (and may not be relevant) - keep it low*")
+num_samples = st.slider(
+    label="Amount of References to Give to Model", min_value=10, max_value=100, value=10
+)
+st.markdown(
+    "*High amount will make it slow and expensive (and may not be relevant) - keep it low*"
+)
 
 access_key: str = st.text_input("Enter your gpt key here", type="password")
-st.markdown("*For more information about how to get an access key, read [this article](https://platform.openai.com/api-keys). Make sure it has money in it ☠️*", unsafe_allow_html=True)
+st.markdown(
+    "*For more information about how to get an access key, read [this article](https://platform.openai.com/api-keys). Make sure it has money in it ☠️*",
+    unsafe_allow_html=True,
+)
 
-gpt_type: str = st.selectbox(label="Choose GPT Type", options=["gpt-3.5-turbo", "gpt-3.5-turbo-1106", "gpt-3.5-turbo-0125", "gpt-4-32k-0613", "gpt-4-0613", "gpt-4-0125-preview"], index=0)
-st.markdown("*For more information about GPT types, read [this article](https://platform.openai.com/docs/models).*", unsafe_allow_html=True)
+gpt_type: str = st.selectbox(
+    label="Choose GPT Type",
+    options=[
+        "gpt-3.5-turbo",
+        "gpt-3.5-turbo-1106",
+        "gpt-3.5-turbo-0125",
+        "gpt-4-32k-0613",
+        "gpt-4-0613",
+        "gpt-4-0125-preview",
+    ],
+    index=0,
+)
+st.markdown(
+    "*For more information about GPT types, read [this article](https://platform.openai.com/docs/models).*",
+    unsafe_allow_html=True,
+)
 
 st.divider()
 
 # ON BUTTON CLICK
-if st.button('Start Scheming') & (access_key != "") & (user_question != ""):
+if st.button("Start Scheming") & (access_key != "") & (user_question != ""):
     openai_client = OpenAI(api_key=access_key)
 
-    with st.spinner('Loading...'):
+    with st.spinner("Loading..."):
         # Perform the Chromadb query.
         results = schemer.query(
-            query_texts=[user_question],
-            n_results=num_samples,
-            include = ['documents']
+            query_texts=[user_question], n_results=num_samples, include=["documents"]
         )
         documents = results["documents"]
         response = openai_client.chat.completions.create(
             model="gpt-3.5-turbo",
             messages=[
-                {"role": "system", "content": "You are an expert in functional programming in R5RS, with great knowledge on programming paradigms. You wish to teach the user everything you know about programming paradigms in R5RS - so you explain everything thoroughly. Surround Latex equations in dollar signs as such Inline equation: $equation$ & Display equation: $$equation$$. You will focus your examples to work exclusively in interative and recursive apporaches"},
+                {
+                    "role": "system",
+                    "content": "You are an expert in functional programming in R5RS, with great knowledge on programming paradigms. You wish to teach the user everything you know about programming paradigms in R5RS - so you explain everything thoroughly. Surround Latex equations in dollar signs as such Inline equation: $equation$ & Display equation: $$equation$$. You will focus your examples to work exclusively in interative and recursive apporaches",
+                },
                 {"role": "user", "content": user_question},
                 {"role": "assistant", "content": str(documents)},
-                {"role": "user", "content": f"Conversation History: {history}"}
+                {"role": "user", "content": f"Conversation History: {history}"},
             ],
-            temperature=temperature
+            temperature=temperature,
+            stream=True,
         )
-
-        history.append({user_question : response.choices[0].message.content} if remember_chat_history else {})
-
-        st.header("Prof Says ...")
-        st.write(response.choices[0].message.content)
+
+        # history.append({user_question : response.choices[0].message.content} if remember_chat_history else {})
+
+        st.header("The Mega Schemer Says ...")
+
+        text_placeholder = st.empty()
+
+        content = ""
+        for i, chunk in enumerate(response):
+            if chunk.choices[0].delta.content is not None:
+                # Append the chunk content to the string
+                content += chunk.choices[0].delta.content
+
+                text_placeholder.markdown(content)
 else:
     st.write("Please provide an input and (valid) API key")
-
-