ale-dp committed on
Commit 34f7b43
1 Parent(s): 8f5ec2e

Update app.py

Files changed (1)
  1. app.py +27 -21
app.py CHANGED
@@ -3,6 +3,33 @@ from transformers import AutoModelForSeq2SeqLM, AutoTokenizer
  import torch
  import sys

+ st.title("Dialog Summarizer App")
+
+ # User input
+ user_input = st.text_area("Enter the dialog:")
+
+ # Add "Summarize" and "Clear" buttons
+ summarize_button = st.button("Summarize")
+ clear_button = st.button("Clear")
+
+ # If "Clear" button is clicked, clear the user input
+ if clear_button:
+     user_input = ""
+
+ # "Summarize" button and user input, generate and display summary
+ if summarize_button and user_input:
+     # Load pre-trained Pegasus model and tokenizer
+     model_name = "ale-dp/pegasus-finetuned-dialog-summarizer"
+     tokenizer = AutoTokenizer.from_pretrained(model_name)
+     model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
+
+     # Generate summary
+     summary = generate_summary(model, tokenizer, user_input)
+
+     # Display the generated summary
+     st.subheader("Generated Summary:")
+     st.write(summary)
+
  def generate_summary(model, tokenizer, dialogue):
      # Tokenize input dialogue
      inputs = tokenizer(dialogue, return_tensors="pt", max_length=1024, truncation=True)
@@ -14,24 +41,3 @@ def generate_summary(model, tokenizer, dialogue):
      # Decode and return the summary
      summary = tokenizer.decode(summary_ids[0], skip_special_tokens=True, clean_up_tokenization_spaces=True)
      return summary
-
-
- st.title("Dialog Summarizer App")
-
- # User input
- user_input = st.text_area("Enter the dialog:")
- if not user_input:
-     st.info("Please enter a dialog.")
-     sys.exit()
-
- # Load pre-trained Pegasus model and tokenizer
- model_name = "ale-dp/pegasus-finetuned-dialog-summarizer"
- tokenizer = AutoTokenizer.from_pretrained(model_name)
- model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
-
- # Generate summary
- summary = generate_summary(model, tokenizer, user_input)
-
- # Display the generated summary
- st.subheader("Summary:")
- st.write(summary)
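
Note on this revision: the new Streamlit UI block (new lines 6-32) now runs before the def generate_summary statement at new line 33 is executed, so clicking "Summarize" with text in the box would reach generate_summary(model, tokenizer, user_input) before that name is bound and raise a NameError. Below is a minimal sketch, not the repository's exact file, with the function defined first; the generation step between tokenization and decoding is elided in the diff, so the model.generate() call and its settings here are assumptions.

# Minimal sketch, assuming the model name and UI from the diff; the
# generation settings are illustrative, not the committed code.
import streamlit as st
import torch
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer


def generate_summary(model, tokenizer, dialogue):
    # Tokenize input dialogue
    inputs = tokenizer(dialogue, return_tensors="pt", max_length=1024, truncation=True)

    # Generate summary token ids (beam-search settings are an assumption)
    with torch.no_grad():
        summary_ids = model.generate(inputs["input_ids"], num_beams=4, max_length=128)

    # Decode and return the summary
    return tokenizer.decode(summary_ids[0], skip_special_tokens=True,
                            clean_up_tokenization_spaces=True)


st.title("Dialog Summarizer App")

# User input and "Summarize" button
user_input = st.text_area("Enter the dialog:")
summarize_button = st.button("Summarize")

if summarize_button and user_input:
    # Load pre-trained Pegasus model and tokenizer
    model_name = "ale-dp/pegasus-finetuned-dialog-summarizer"
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForSeq2SeqLM.from_pretrained(model_name)

    # Generate and display the summary
    st.subheader("Generated Summary:")
    st.write(generate_summary(model, tokenizer, user_input))

Two smaller observations on the committed version: the "Clear" button only resets the local user_input variable, so the text area keeps its previous contents (actually clearing it would need a widget key and st.session_state), and import sys appears to be unused now that the sys.exit() call has been removed. The app is launched in the usual way with streamlit run app.py.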