wjjessen committed
Commit e38e783
Parents (2): af44c43 698ca24

update code

Files changed (1): app.py +1 -3
app.py CHANGED
@@ -345,7 +345,7 @@ def main():
             st.success(summary_cleaned_final)
             with st.expander("Unformatted output"):
                 st.write(summary)
-        else:
+        else: # T5 model
             # Remove duplicate sentences
             summary_dedup = remove_duplicate_sentences(summary)
             # Remove incomplete last sentence
@@ -367,7 +367,6 @@ def main():
         first_10_tokens = input_ids[:10]
         first_10_tokens_text = tokenizer.convert_ids_to_tokens(first_10_tokens)
         st.write(first_10_tokens_text)
-
         st.write("[RecursiveCharacterTextSplitter](%s) parameters used:" % url)
         st.write(
             "        chunk_size=%s"
@@ -391,7 +390,6 @@ def main():
         st.write("")
         st.write(text_chunks[2])
         st.write("\n")
-
         st.write(
             "Extracted and cleaned text, less sentences containing excluded words:"
         )
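For context, remove_duplicate_sentences() is a helper defined elsewhere in app.py and is not shown in this diff. A minimal sketch of what such a helper could look like, purely as illustration; the regex sentence split and ordering logic below are assumptions, not the app's actual implementation:

import re

def remove_duplicate_sentences(text: str) -> str:
    # Split on sentence-ending punctuation followed by whitespace (assumed heuristic)
    sentences = re.split(r"(?<=[.!?])\s+", text.strip())
    seen = set()
    unique = []
    for sentence in sentences:
        key = sentence.strip().lower()
        if key and key not in seen:
            seen.add(key)
            unique.append(sentence.strip())
    # Rejoin the first occurrence of each sentence, preserving original order
    return " ".join(unique)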
 
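The second and third hunks print the RecursiveCharacterTextSplitter settings and the resulting text_chunks. For reference, this is roughly how that LangChain splitter is configured; the chunk_size and chunk_overlap values below are placeholders, not the values app.py actually uses:

from langchain.text_splitter import RecursiveCharacterTextSplitter

sample_text = "Example extracted document text. " * 200  # stand-in for the cleaned input

splitter = RecursiveCharacterTextSplitter(
    chunk_size=1000,    # max characters per chunk (placeholder value)
    chunk_overlap=100,  # characters shared between adjacent chunks (placeholder value)
)
text_chunks = splitter.split_text(sample_text)
print(len(text_chunks), text_chunks[0][:60])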