ruanchaves committed on
Commit 0460b2c
1 Parent(s): 0d4d53f

simplify interface

Files changed (2)
  1. app.py +35 -25
  2. requirements.txt +2 -1
app.py CHANGED
@@ -1,6 +1,7 @@
 import gradio as gr
 from transformers import AutoTokenizer, AutoModelForSequenceClassification
 import torch
+from scipy.special import softmax
 
 article_string = "Author: <a href=\"https://huggingface.co/ruanchaves\">Ruan Chaves Rodrigues</a>. Read more about our <a href=\"https://github.com/ruanchaves/eplm\">research on the evaluation of Portuguese language models</a>."
 
@@ -69,6 +70,10 @@ user_friendly_name = {
     "ruanchaves/bert-large-portuguese-cased-assin-similarity": "BERTimbau large (ASSIN)"
 }
 
+reverse_user_friendly_name = { v:k for k,v in user_friendly_name.items() }
+
+user_friendly_name_list = list(user_friendly_name.values())
+
 model_array = []
 
 for model_name in model_list:
@@ -79,42 +84,47 @@ for model_name in model_list:
     model_array.append(row)
 
 
-def similarity(s1, s2):
+def predict(s1, s2, chosen_model):
+    if not chosen_model:
+        chosen_model = user_friendly_name_list[0]
     scores = {}
+    full_chosen_model_name = reverse_user_friendly_name[chosen_model]
     for row in model_array:
-        name = user_friendly_name[row["name"]]
-        tokenizer = row["tokenizer"]
-        model = row["model"]
-        model_input = tokenizer(*([s1], [s2]), padding=True, return_tensors="pt")
-        with torch.no_grad():
-            output = model(**model_input)
-            score = output[0][0].item()
-            scores[name] = score
-    assin2_scores = {k: v for k, v in scores.items() if "ASSIN 2" in k}
-    average_score = sum(assin2_scores.values()) / len(assin2_scores)
-    average_score_str = '{:,.2%}'.format(min(average_score,5) / 5)
-    description = score_descriptions[round(average_score)]
-    description_pt = score_descriptions_pt[round(average_score)]
-    final_description = description.format(average_score_str) + "\n \n" + description_pt.format(average_score_str)
-
-    for key, value in scores.items():
-        scores[key] = '{:,.2%}'.format(min(value, 5) / 5)
-
-    return final_description, scores
+        name = row["name"]
+        if name != full_chosen_model_name:
+            continue
+        else:
+            tokenizer = row["tokenizer"]
+            model = row["model"]
+            model_input = tokenizer(*([s1], [s2]), padding=True, return_tensors="pt")
+            with torch.no_grad():
+                output = model(**model_input)
+                logits = output[0][0].detach().numpy()
+                logits = softmax(logits).tolist()
+            break
+    def get_description(idx):
+        description = score_descriptions[idx]
+        description_pt = score_descriptions_pt[idx]
+        final_description = description + "\n \n" + description_pt
+        return final_description
+
+    scores = { get_description(k):v for k,v in enumerate(logits) }
+
+    return scores
 
 
 inputs = [
-    gr.inputs.Textbox(label="Text 1"),
-    gr.inputs.Textbox(label="Text 2")
+    gr.inputs.Textbox(label="Question"),
+    gr.inputs.Textbox(label="Answer"),
+    gr.Dropdown(label="Model", choices=user_friendly_name_list, default=user_friendly_name_list[0])
 ]
 
 outputs = [
-    gr.Textbox(label="Evaluation", value=output_textbox_component_description),
-    gr.JSON(label="Similarity scores by model", value=output_json_component_description)
+    gr.Label(label="Result")
 ]
 
 
-gr.Interface(fn=similarity, inputs=inputs, outputs=outputs, title=app_title,
+gr.Interface(fn=predict, inputs=inputs, outputs=outputs, title=app_title,
              description=app_description,
              examples=app_examples,
              article = article_string).launch()
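For context, the refactored predict() runs only the single checkpoint chosen in the new Dropdown and returns a softmax distribution keyed by human-readable descriptions, which gr.Label renders directly. Below is a minimal, self-contained sketch of that inference path. The checkpoint id, the DESCRIPTIONS list, and the predict_single helper are placeholders for illustration only; the app itself uses model_list, score_descriptions, and score_descriptions_pt, which are defined outside this diff.

import torch
from scipy.special import softmax
from transformers import AutoTokenizer, AutoModelForSequenceClassification

# Placeholders: substitute a real checkpoint from model_list and the real
# description strings from app.py. DESCRIPTIONS must have one entry per
# label of the chosen checkpoint (model.config.num_labels).
MODEL_NAME = "your-org/your-pair-classification-model"
DESCRIPTIONS = ["description for class 0", "description for class 1", "description for class 2"]

tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForSequenceClassification.from_pretrained(MODEL_NAME)

def predict_single(s1, s2):
    # Encode the two texts as one sentence pair, as predict() does via tokenizer(*([s1], [s2]), ...).
    model_input = tokenizer([s1], [s2], padding=True, return_tensors="pt")
    with torch.no_grad():
        logits = model(**model_input).logits[0].numpy()
    # softmax turns the raw logits into probabilities; gr.Label expects a {label: confidence} dict.
    probs = softmax(logits).tolist()
    return {DESCRIPTIONS[i]: p for i, p in enumerate(probs)}

print(predict_single("Qual é a capital do Brasil?", "Brasília é a capital do Brasil."))

The break inside the loop means only the selected checkpoint is evaluated per request, which is the main simplification over the old similarity(), which ran every model and averaged the ASSIN 2 scores.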
requirements.txt CHANGED
@@ -1,3 +1,4 @@
 torch
 gradio
-transformers
+transformers
+scipy
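The new scipy requirement is used only for scipy.special.softmax, which normalizes the selected model's logits into the probability mapping shown by gr.Label. A tiny illustration with made-up logits:

from scipy.special import softmax

# Made-up logits for a three-class pair classifier, purely for illustration.
logits = [2.1, -0.3, 0.7]
probs = softmax(logits).tolist()
print({f"class {i}": round(p, 3) for i, p in enumerate(probs)})  # probabilities sum to 1.0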