# Groove-GPT / app.py
import streamlit as st
import chromadb
from chromadb.utils import embedding_functions
from openai import OpenAI
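# Open the on-disk ChromaDB store and its pre-built Scheme collection.
# The embedding function must match the model used when the collection was indexed,
# otherwise query embeddings land in a different vector space than the stored documents.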
client = chromadb.PersistentClient(path="./chromadb/")
MODEL_NAME: str = "mixedbread-ai/mxbai-embed-large-v1" # ~ 0.5 gb
COLLECTION_NAME: str = "scheme"
EMBEDDING_FUNC = embedding_functions.SentenceTransformerEmbeddingFunction(model_name=MODEL_NAME)
schemer = client.get_collection(
    name=COLLECTION_NAME,
    embedding_function=EMBEDDING_FUNC,
)
DATA_AVAL: bool = schemer.count() > 0
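# Streamlit UI: describe the app and collect the question, OpenAI API key, and model choice.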
APP_NAME: str = "Groove-GPT"
st.title(APP_NAME)
st.header("What is Groovy-GPT?")
st.write("Groovy-GPT is a RAG (Retrieval-Augmented Generation) model that uses ChromaDB to retrieve relevant documents and then uses OpenAI's models to generate a response.")
st.write("The model is trained on the MIT Scheme textbook and a handful of Discrete Math and Paradigms related content that Professor Troeger posted")
st.write("Data Avaliable: ", DATA_AVAL)
user_question: str = st.text_area("Enter your groovy questions here")
access_key: str = st.text_input("Enter your OpenAI API key here", type="password")
st.markdown("*For more information about how to get an access key, read [this article](https://platform.openai.com/api-keys).*", unsafe_allow_html=True)
gpt_type: str = st.selectbox(label="Choose GPT Type", options=["gpt-3.5-turbo", "gpt-3.5-turbo-1106", "gpt-3.5-turbo-0125", "gpt-4-32k-0613", "gpt-4-0613", "gpt-4-0125-preview"], index=0)
st.markdown("*For more information about GPT types, read [this article](https://platform.openai.com/docs/models). Make sure it has money in it ☠️*", unsafe_allow_html=True)
if st.button('Query Database') and access_key != "" and user_question != "":
    openai_client = OpenAI(api_key=access_key)

    with st.spinner('Loading...'):
        st.header("Results")

        # Perform the ChromaDB similarity search for the 10 closest chunks.
        results = schemer.query(
            query_texts=[user_question],
            n_results=10,
            include=['documents'],
        )
        # results["documents"] is a list of document lists, one per query text.
        documents = results["documents"]
        # Send the question to the model chosen in the selectbox above, with the
        # retrieved documents passed along as prior assistant context.
        response = openai_client.chat.completions.create(
            model=gpt_type,
            messages=[
                {"role": "system", "content": "You are an expert in functional programming in Scheme, with great knowledge of programming paradigms."},
                {"role": "user", "content": user_question},
                {"role": "assistant", "content": str(documents)},
            ],
        )

        st.write(response.choices[0].message.content)