vinhnx90 committed
Commit fa9f583 • 1 Parent(s): 259cbe8

Update app

Files changed (1): app.py (+9 -9)
app.py CHANGED
@@ -47,7 +47,7 @@ def load_and_process_file(file_data):
         chunk_overlap=200,
     )
     chunks = text_splitter.split_documents(documents)
-    embeddings = OpenAIEmbeddings(openai_api_key=st.secrets.openai_api_key)
+    embeddings = OpenAIEmbeddings(openai_api_key=st.secrets.OPENAI_API_KEY)
     vector_store = Chroma.from_documents(chunks, embeddings)
     return vector_store
 
@@ -60,7 +60,7 @@ def initialize_chat_model(vector_store):
     llm = ChatOpenAI(
         model="gpt-3.5-turbo",
         temperature=0,
-        openai_api_key=st.secrets.openai_api_key,
+        openai_api_key=st.secrets.OPENAI_API_KEY,
     )
     retriever = vector_store.as_retriever()
     return ConversationalRetrievalChain.from_llm(llm, retriever)
@@ -72,12 +72,12 @@ def main():
     """
 
     if "openai_api_key" in st.secrets:
-        openai_api_key = st.secrets.openai_api_key
+        openai_api_key = st.secrets.OPENAI_API_KEY
     else:
         openai_api_key = st.sidebar.text_input("OpenAI API Key", type="password")
-        st.secrets.openai_api_key = openai_api_key
+        st.secrets.OPENAI_API_KEY = openai_api_key
 
-    if not st.secrets.openai_api_key:
+    if not st.secrets.OPENAI_API_KEY:
         st.info("Please add your OpenAI API key to continue.")
 
     assistant_message = "Hello, you can upload a document and chat with me to ask questions related to its content. Start by adding OpenAI API Key in the sidebar."
@@ -89,7 +89,7 @@ def main():
 
     if prompt := st.chat_input(
         placeholder="Chat with your document",
-        disabled=(not st.secrets.openai_api_key),
+        disabled=(not st.secrets.OPENAI_API_KEY),
     ):
         st.session_state.messages.append(User(message=prompt).build_message())
         st.chat_message(ChatProfileRoleEnum.User).write(prompt)
@@ -121,7 +121,7 @@ def handle_question(question):
     with st.chat_message(ChatProfileRoleEnum.Assistant):
         stream_handler = StreamHandler(st.empty())
         llm = ChatOpenAI(
-            openai_api_key=st.secrets.openai_api_key,
+            openai_api_key=st.secrets.OPENAI_API_KEY,
             streaming=True,
             callbacks=[stream_handler],
         )
@@ -161,9 +161,9 @@ def build_sidebar():
 
     add_file = st.button(
         "Process File",
-        disabled=(not uploaded_file and not st.secrets.openai_api_key),
+        disabled=(not uploaded_file and not st.secrets.OPENAI_API_KEY),
     )
-    if add_file and uploaded_file and st.secrets.openai_api_key.startswith("sk-"):
+    if add_file and uploaded_file and st.secrets.OPENAI_API_KEY.startswith("sk-"):
         with st.spinner("💭 Thinking..."):
             vector_store = load_and_process_file(uploaded_file)
 
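Note: the renamed lookups above assume the app's configuration defines a secret named OPENAI_API_KEY (for example in the Space's secret settings or a local .streamlit/secrets.toml entry like OPENAI_API_KEY = "sk-...", where the value is a placeholder). A minimal sketch of how Streamlit exposes such an entry, under that assumption:

    import streamlit as st

    # st.secrets supports both key-style and attribute-style access;
    # both forms read the same OPENAI_API_KEY entry used throughout this commit.
    api_key = st.secrets["OPENAI_API_KEY"]
    assert api_key == st.secrets.OPENAI_API_KEY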