kaushikbar committed
Commit
d3269c3
1 Parent(s): b969c52

cleaned up

Files changed (2)
  1. app.py +6 -35
  2. requirements.txt +0 -3
app.py CHANGED
@@ -1,47 +1,19 @@
 import datetime
 import gradio as gr
 from huggingface_hub import hf_hub_download
-from langdetect import detect, DetectorFactory, detect_langs
-import fasttext
 from transformers import pipeline
 
 models = {
-    'en': 'Narsil/deberta-large-mnli-zero-cls', #'facebook/bart-large-mnli', # English
-    #'es': 'Recognai/zeroshot_selectra_medium', # Spanish
-    #'it': 'joeddav/xlm-roberta-large-xnli', # Italian
-    #'ru': 'DeepPavlov/xlm-roberta-large-en-ru-mnli', # Russian
-    #'tr': 'vicgalle/xlm-roberta-large-xnli-anli', # Turkish
-    #'no': 'NbAiLab/nb-bert-base-mnli', # Norsk
-    #'de': 'Sahajtomar/German_Zeroshot' # German
-    }
+    'en': 'Narsil/deberta-large-mnli-zero-cls'
+    }
 
 hypothesis_templates = {
-    'en': 'This example is {}.', # English
-    #'es': 'Este ejemplo es {}.', # Spanish
-    #'it': 'Questo esempio è {}.', # Italian
-    #'ru': 'Этот пример {}.', # Russian
-    #'tr': 'Bu örnek {}.', # Turkish
-    #'no': 'Dette eksempelet er {}.', # Norsk
-    #'de': 'Dieses beispiel ist {}.' # German
-    }
+    'en': 'This example is {}.'
+    }
 
 classifiers = {'en': pipeline("zero-shot-classification", hypothesis_template=hypothesis_templates['en'],
-                              model=models['en']),
-               #'es': pipeline("zero-shot-classification", hypothesis_template=hypothesis_templates['es'],
-               #               model=models['es']),
-               #'it': pipeline("zero-shot-classification", hypothesis_template=hypothesis_templates['it'],
-               #               model=models['it']),
-               #'ru': pipeline("zero-shot-classification", hypothesis_template=hypothesis_templates['ru'],
-               #               model=models['ru']),
-               #'tr': pipeline("zero-shot-classification", hypothesis_template=hypothesis_templates['tr'],
-               #               model=models['tr']),
-               #'no': pipeline("zero-shot-classification", hypothesis_template=hypothesis_templates['no'],
-               #               model=models['no']),
-               #'de': pipeline("zero-shot-classification", hypothesis_template=hypothesis_templates['de'],
-               #               model=models['de'])
-               }
-
-fasttext_model = fasttext.load_model(hf_hub_download("julien-c/fasttext-language-id", "lid.176.bin"))
+                              model=models['en'])
+               }
 
 def prep_examples():
     example_text1 = "The tangy feel in my mouth was a different experience altogether."
@@ -211,7 +183,6 @@ iface = gr.Interface(
             gr.inputs.Radio(choices=[False, True],
                             label="Multi-label?")],
     outputs=gr.outputs.Label(),
-    #interpretation="default",
     examples=prep_examples())
 
 iface.launch()
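
Note: after this commit, app.py keeps only the English zero-shot classifier and drops the fastText/langdetect language-identification path. A minimal sketch of how the retained pipeline would be invoked, mirroring the model and hypothesis template shown in the diff; the example text, candidate labels, and multi_label flag below are illustrative assumptions, not taken from the repo:

import transformers

# Same constructor arguments as the retained classifiers['en'] entry above.
classifier = transformers.pipeline(
    "zero-shot-classification",
    model='Narsil/deberta-large-mnli-zero-cls',
    hypothesis_template='This example is {}.')

# Hypothetical call: labels and multi_label are placeholders for whatever the app passes in.
result = classifier(
    "The tangy feel in my mouth was a different experience altogether.",
    candidate_labels=["positive", "negative", "neutral"],
    multi_label=False)
print(result["labels"][0], result["scores"][0])  # top label and its score
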
requirements.txt CHANGED
@@ -1,6 +1,3 @@
 transformers
-sentence-transformers
 torch
-langdetect
-fasttext
 