andreped committed on
Commit
298864d
1 Parent(s): 8cdd0c9

Moved keys and hidden variables into streamlit secrets

Files changed (3)
  1. .gitignore +2 -0
  2. app.py +5 -7
  3. chatbot/utils.py +10 -11
.gitignore CHANGED
@@ -2,3 +2,5 @@ venv/
 data/
 .DS_Store
 config.json
+.streamlit/
+secrets.toml
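For local development, the now-ignored .streamlit/secrets.toml is where the values read via st.secrets would live. A minimal sketch with placeholder values (the key names match the ones used in app.py and chatbot/utils.py below; the actual values are specific to your Azure OpenAI deployment):

# .streamlit/secrets.toml (kept out of version control, see .gitignore above)
OPENAI_API_KEY = "<azure-openai-api-key>"
OPENAI_API_BASE = "https://<resource-name>.openai.azure.com/"
OPENAI_API_VERSION = "<api-version>"
ENGINE = "<chat-completion-deployment-name>"
ENGINE_EMBEDDING = "<embedding-deployment-name>"

When the app is deployed, the same keys would instead be supplied through the hosting platform's secrets settings rather than a committed file.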
app.py CHANGED
@@ -1,25 +1,23 @@
-import json
+import os
 
 import streamlit as st
 
 from chatbot.utils import download_test_data
 from chatbot.utils import load_data
 
+# add OpenAI API key to environment variables
+os.environ["OPENAI_API_KEY"] = st.secrets["OPENAI_API_KEY"]
+
 # Initialize message history
 st.header("Chat with André's research 💬 📚")
 
 if "messages" not in st.session_state.keys():  # Initialize the chat message history
     st.session_state.messages = [{"role": "assistant", "content": "Ask me a question about André's research!"}]
 
-# Load config values
-with open(r"config.json") as config_file:
-    config_details = json.load(config_file)
-
-
 def main():
     # setup dataset
     download_test_data()
-    index = load_data(config_details)
+    index = load_data()
     chat_engine = index.as_chat_engine(chat_mode="condense_question", verbose=True)
 
     if prompt := st.chat_input("Your question"):  # Prompt for user input and save to chat history
chatbot/utils.py CHANGED
@@ -14,22 +14,21 @@ from llama_index.llms import AzureOpenAI
 def download_test_data():
     # url = f"https://drive.google.com/drive/folders/uc?export=download&confirm=pbef&id={file_id}"
     url = "https://drive.google.com/drive/folders/1uDSAWtLvp1YPzfXUsK_v6DeWta16pq6y"
-    with st.spinner(text="Downloading test data. Might take a few seconds."):
+    with st.spinner(text="Downloading test data. This might take a minute."):
         download_folder(url=url, quiet=False, use_cookies=False, output="./data/")
 
-
 @st.cache_resource(show_spinner=False)
-def load_data(config_details):
+def load_data():
     with st.spinner(text="Loading and indexing the provided dataset – hang tight! This may take a few seconds."):
         documents = SimpleDirectoryReader(input_dir="./data", recursive=True).load_data()
         llm = AzureOpenAI(
             model="gpt-3.5-turbo",
-            engine=config_details["ENGINE"],
+            engine=st.secrets["ENGINE"],
             temperature=0.5,
-            api_key=os.getenv("OPENAI_API_KEY"),
-            api_base=config_details["OPENAI_API_BASE"],
+            api_key=os.environ["OPENAI_API_KEY"],
+            api_base=st.secrets["OPENAI_API_BASE"],
             api_type="azure",
-            api_version=config_details["OPENAI_API_VERSION"],
+            api_version=st.secrets["OPENAI_API_VERSION"],
             system_prompt="You are an expert on André's research and your job is to answer"
             "technical questions. Assume that all questions are related to"
             "André's research. Keep your answers technical and based on facts"
@@ -38,11 +37,11 @@ def load_data(config_details):
         # You need to deploy your own embedding model as well as your own chat completion model
         embed_model = OpenAIEmbedding(
             model="text-embedding-ada-002",
-            deployment_name=config_details["ENGINE_EMBEDDING"],
-            api_key=os.getenv("OPENAI_API_KEY"),
-            api_base=config_details["OPENAI_API_BASE"],
+            deployment_name=st.secrets["ENGINE_EMBEDDING"],
+            api_key=os.environ["OPENAI_API_KEY"],
+            api_base=st.secrets["OPENAI_API_BASE"],
             api_type="azure",
-            api_version=config_details["OPENAI_API_VERSION"],
+            api_version=st.secrets["OPENAI_API_VERSION"],
         )
         service_context = ServiceContext.from_defaults(llm=llm, embed_model=embed_model)
         set_global_service_context(service_context)