nickmuchi committed
Commit be50647 • 1 Parent(s): 1bb7a66

Update pages/2_Earnings_Summarization_📖_.py

pages/2_Earnings_Summarization_📖_.py CHANGED
@@ -17,18 +17,34 @@ if "earnings_passages" not in st.session_state:
 
 if st.session_state['earnings_passages']:
 
-    with st.spinner("Summarizing and matching entities, this takes a few seconds..."):
-        text_to_summarize = chunk_and_preprocess_text(st.session_state['earnings_passages'])
-        summarized_text = summarize_text(text_to_summarize,max_len=max_len,min_len=min_len)
-        entity_match_html = highlight_entities(text_to_summarize,summarized_text)
-        st.markdown("####")
-
-    with st.expander(label='Summarized Earnings Call',expanded=True):
-        st.write(entity_match_html, unsafe_allow_html=True)
-
-    st.markdown("####")
-
-    summary_downloader(summarized_text)
+    with st.spinner("Summarizing and matching entities, this takes a few seconds..."):
+
+        try:
+            text_to_summarize = chunk_and_preprocess_text(st.session_state['earnings_passages'])
+            summarized_text = summarize_text(text_to_summarize,max_len=max_len,min_len=min_len)
+
+        except IndexError:
+            try:
+                text_to_summarize = chunk_and_preprocess_text(st.session_state['earnings_passages'],450)
+                summarized_text = summarize_text(text_to_summarize,max_len=max_len,min_len=min_len)
+
+            except IndexError:
+                text_to_summarize = chunk_and_preprocess_text(st.session_state['earnings_passages'],400)
+                summarized_text = summarize_text(text_to_summarize,max_len=max_len,min_len=min_len)
+
+        entity_match_html = highlight_entities(text_to_summarize,summarized_text)
+        st.markdown("####")
+
+        with st.expander(label='Summarized Earnings Call',expanded=True):
+            st.write(entity_match_html, unsafe_allow_html=True)
+
+        st.markdown("####")
+
+        summary_downloader(summarized_text)
 
 else:
     st.write("No text to summarize detected, please ensure you have entered the YouTube URL on the Sentiment Analysis page")