Burcin committed
Commit 96d3c78 · 1 Parent(s): 695cbcc

Upload app.py

Files changed (1)
  1. app.py +43 -5
app.py CHANGED
@@ -1,4 +1,5 @@
 import gradio as gr
+from gradio.mix import Parallel, Series
 import wikipedia
 import spacy
 from spacy.lang.en.stop_words import STOP_WORDS
@@ -9,13 +10,29 @@ nltk.download('punkt', quiet=True)
 from nltk.stem import WordNetLemmatizer
 from heapq import nlargest
 import warnings
-
+from sklearn.feature_extraction.text import TfidfVectorizer
+import numpy as np
+from transformers import PegasusForConditionalGeneration, PegasusTokenizer
 
 warnings.filterwarnings("ignore")
 
-def get_wiki_summary(inp):
+def get_wiki_original_text(inp):
+    text = wikipedia.summary(inp)
+    return text
+
+
+def get_wiki_summary_by_pegasus(inp):
+    text = wikipedia.summary(inp)
+    tokenizer = PegasusTokenizer.from_pretrained('google/pegasus-xsum')
+    tokens = tokenizer(text, truncation=True, padding="longest", return_tensors="pt")
+    model = PegasusForConditionalGeneration.from_pretrained("google/pegasus-xsum")
+    summary = model.generate(**tokens)
+    return tokenizer.decode(summary)
+
+
+
+def get_wiki_summary_by_lem(inp):
     text = wikipedia.summary(inp)
-    print('\033[1m' + "Original Text Fetched from Wikipedia" + '\033[0m')
 
     print(text)
 
@@ -61,5 +78,26 @@ def get_wiki_summary(inp):
 
     return summary
 
-if __name__ == '__main__':
-    gr.Interface(fn=get_wiki_summary, inputs=gr.inputs.Textbox(label="Requested Topic from Wikipedia : "), outputs="text").launch(inline=False, share=True)
+
+
+
+desc = """This interface allows you to summarize Wikipedia explanations. Only requirement is to write the topic. For summarization this model uses extractive summarization method and the number of sentences in the output depends on the length of the original text."""
+
+
+sample = [['Europe'], ['Great Depression'], ['Crocodile Dundee']]
+
+
+iface = Parallel(gr.Interface(fn=get_wiki_original_text, inputs=gr.inputs.Textbox(label="Requested Topic from Wikipedia : "), outputs="text"),
+                 gr.Interface(fn=get_wiki_summary_by_lem, inputs=gr.inputs.Textbox(label="Requested Topic from Wikipedia : "), outputs="text"),
+                 gr.Interface(fn=get_wiki_summary_by_pegasus, inputs=gr.inputs.Textbox(label="Requested Topic from Wikipedia : "), outputs="text"),
+
+
+
+
+                 # get_wiki_original_text, get_wiki_summary_by_lem, get_wiki_summary_by_pegasus,
+                 title='Text Summarizer',
+                 description=desc,
+                 examples=sample,
+                 inputs=gr.inputs.Textbox(label="Text"))
+
+iface.launch(inline=False)
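
A note on the Pegasus helper added in this commit: model.generate(**tokens) returns a batch of token-id sequences, and passing that whole tensor to tokenizer.decode may leave special tokens such as </s> or <pad> in the output (or fail, depending on the transformers version). Below is a minimal standalone sketch of the same pipeline that decodes only the first generated sequence with skip_special_tokens=True; the function name pegasus_summary and the sample topic are illustrative and not part of the committed code.

# Standalone sketch (not part of the commit): the Pegasus path from the diff,
# decoding only the first generated sequence and dropping special tokens.
import wikipedia
from transformers import PegasusForConditionalGeneration, PegasusTokenizer

def pegasus_summary(topic):
    # Fetch the lead section of the Wikipedia article, as the app does.
    text = wikipedia.summary(topic)
    tokenizer = PegasusTokenizer.from_pretrained("google/pegasus-xsum")
    model = PegasusForConditionalGeneration.from_pretrained("google/pegasus-xsum")
    tokens = tokenizer(text, truncation=True, padding="longest", return_tensors="pt")
    summary_ids = model.generate(**tokens)  # shape: (batch_size, sequence_length)
    return tokenizer.decode(summary_ids[0], skip_special_tokens=True)

if __name__ == "__main__":
    print(pegasus_summary("Great Depression"))

The Parallel wrapper from gradio.mix, part of the older Gradio API used here alongside gr.inputs.Textbox, runs all three interfaces on the same input and displays their outputs side by side, so the original text, the extractive summary, and the Pegasus summary can be compared directly.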