Kevin Xu committed
Commit df428ae
1 Parent(s): 48781bc

start with one model

Files changed (1)
  1. app.py +14 -13
app.py CHANGED
@@ -5,18 +5,18 @@ import gradio as gr
 from gradio.mix import Parallel, Series
 
 io1 = gr.Interface.load('huggingface/google/pegasus-large')
-io2 = gr.Interface.load("huggingface/google/pegasus-cnn_dailymail")
-io3 = gr.Interface.load("huggingface/google/pegasus-xsum")
-io4 = gr.Interface.load('huggingface/google/pegasus-newsroom')
-io5 = gr.Interface.load("huggingface/google/pegasus-multi_news")
-io6 = gr.Interface.load("huggingface/google/pegasus-reddit_tifu")
-io7 = gr.Interface.load('huggingface/google/pegasus-arxiv')
-io8 = gr.Interface.load("huggingface/google/pegasus-pubmed")
-io9 = gr.Interface.load("huggingface/google/pegasus-wikihow")
-io10 = gr.Interface.load('huggingface/google/pegasus-gigaword')
-io11 = gr.Interface.load("huggingface/google/pegasus-billsum")
-io12 = gr.Interface.load("huggingface/google/pegasus-big_patent")
-io13 = gr.Interface.load("huggingface/google/pegasus-aeslc")
+# io2 = gr.Interface.load("huggingface/google/pegasus-cnn_dailymail")
+# io3 = gr.Interface.load("huggingface/google/pegasus-xsum")
+# io4 = gr.Interface.load('huggingface/google/pegasus-newsroom')
+# io5 = gr.Interface.load("huggingface/google/pegasus-multi_news")
+# io6 = gr.Interface.load("huggingface/google/pegasus-reddit_tifu")
+# io7 = gr.Interface.load('huggingface/google/pegasus-arxiv')
+# io8 = gr.Interface.load("huggingface/google/pegasus-pubmed")
+# io9 = gr.Interface.load("huggingface/google/pegasus-wikihow")
+# io10 = gr.Interface.load('huggingface/google/pegasus-gigaword')
+# io11 = gr.Interface.load("huggingface/google/pegasus-billsum")
+# io12 = gr.Interface.load("huggingface/google/pegasus-big_patent")
+# io13 = gr.Interface.load("huggingface/google/pegasus-aeslc")
 
 desc = "Let Hugging Face models summarize texts for you. Note: Shorter articles generate faster summaries. This summarizer uses bart-large-cnn model by Facebook, pegasus by Google and distilbart-cnn-12-6 by Sshleifer. You can compare these models against each other on their performances. Sample Text input is provided!"
 
@@ -42,7 +42,8 @@ In this article, we will discuss the latest innovations in machine learning tech
 
 sample = [[y],[x],[z]]
 
-iface = Parallel(io1, io2, io3, io4, io5, io6, io7, io8, io9, io10, io11, io12, io13,
+iface = Parallel(io1,
+                 # io2, io3, io4, io5, io6, io7, io8, io9, io10, io11, io12, io13,
                  theme='huggingface',
                  title= 'Hugging Face Text Summarizer',
                  description = desc,
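
To see the net effect of this change, here is a minimal, runnable sketch of app.py as it stands after this commit. It assumes the Gradio 2.x `gradio.mix` API that the file already imports; the short sample strings `x`, `y`, `z`, the `examples=sample` keyword, and the final `launch()` call are placeholders standing in for the parts of app.py that this diff does not show.

```python
# Sketch of app.py after "start with one model" (assumes gradio 2.x,
# where gradio.mix.Parallel and gr.Interface.load are available).
import gradio as gr
from gradio.mix import Parallel

# Only one PEGASUS checkpoint is loaded for now; the other twelve
# remain commented out in the real file until they are re-enabled.
io1 = gr.Interface.load('huggingface/google/pegasus-large')

desc = "Let Hugging Face models summarize texts for you. Note: Shorter articles generate faster summaries."

# Placeholder sample inputs; the real app.py defines full article strings.
x = "First sample article ..."
y = "Second sample article ..."
z = "Third sample article ..."
sample = [[y], [x], [z]]

# Parallel fans one input out to every interface it wraps; with a single
# interface it simply behaves like that interface on its own, so the rest
# of the app keeps working while the other models are disabled.
iface = Parallel(io1,
                 theme='huggingface',
                 title='Hugging Face Text Summarizer',
                 description=desc,
                 examples=sample)

if __name__ == '__main__':
    iface.launch()
```

Re-enabling the side-by-side comparison later should only require uncommenting the extra `gr.Interface.load(...)` lines and passing those interfaces back into `Parallel`.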