awinml committed on
Commit
15c7806
1 Parent(s): aeaab1d

Upload 17 files

Browse files
app.py CHANGED
@@ -74,7 +74,7 @@ with st.sidebar:
74
  )
75
  )
76
 
77
- use_bm25 = st.checkbox("Use 2-Stage Retrieval (BM25)")
78
  num_candidates = int(
79
  st.number_input(
80
  "Number of Candidates to Generate:",
@@ -252,7 +252,7 @@ with tab2:
252
  for ticker, quarter, year in ticker_year_quarter_tuples_list:
253
  file_text = retrieve_transcript(data, year, quarter, ticker)
254
  with st.expander(f"See Transcript - {quarter} {year}"):
255
- st.subheader("Earnings Call Transcript - {quarter} {year}:")
256
  stx.scrollableTextbox(
257
  file_text,
258
  height=700,
@@ -262,5 +262,5 @@ with tab2:
262
 
263
  else:
264
  st.write(
265
- "No specific document chosen. Please mention Ticker and Duration in Question."
266
  )
 
74
  )
75
  )
76
 
77
+ use_bm25 = st.checkbox("Use 2-Stage Retrieval (BM25)", value=True)
78
  num_candidates = int(
79
  st.number_input(
80
  "Number of Candidates to Generate:",
 
252
  for ticker, quarter, year in ticker_year_quarter_tuples_list:
253
  file_text = retrieve_transcript(data, year, quarter, ticker)
254
  with st.expander(f"See Transcript - {quarter} {year}"):
255
+ st.subheader(f"Earnings Call Transcript - {quarter} {year}:")
256
  stx.scrollableTextbox(
257
  file_text,
258
  height=700,
 
262
 
263
  else:
264
  st.write(
265
+ "No specific document chosen. Please mention Ticker and Duration in the Question."
266
  )
utils/__pycache__/prompts.cpython-38.pyc CHANGED
Binary files a/utils/__pycache__/prompts.cpython-38.pyc and b/utils/__pycache__/prompts.cpython-38.pyc differ
 
utils/__pycache__/retriever.cpython-38.pyc CHANGED
Binary files a/utils/__pycache__/retriever.cpython-38.pyc and b/utils/__pycache__/retriever.cpython-38.pyc differ
 
utils/__pycache__/vector_index.cpython-38.pyc CHANGED
Binary files a/utils/__pycache__/vector_index.cpython-38.pyc and b/utils/__pycache__/vector_index.cpython-38.pyc differ
 
utils/models.py CHANGED
@@ -105,7 +105,6 @@ def get_vicuna_text_gen_model():
105
  return client
106
 
107
 
108
- @st.cache_resource
109
  def get_bm25_model(data):
110
  corpus = data.Text.tolist()
111
  corpus_clean = [preprocess_text(x) for x in corpus]
 
105
  return client
106
 
107
 
 
108
  def get_bm25_model(data):
109
  corpus = data.Text.tolist()
110
  corpus_clean = [preprocess_text(x) for x in corpus]