Vladislawoo committed
Commit 6f95ca2 • 1 Parent(s): 19ef4fe
Update app.py
app.py CHANGED
@@ -26,8 +26,8 @@ labels = ["не токсичный", "оскорбляющий", "неприст
 def text2toxicity(text, aggregate=True):
     """ Calculate toxicity of a text (if aggregate=True) or a vector of toxicity aspects (if aggregate=False)"""
     with torch.no_grad():
-        inputs =
-        proba = torch.sigmoid(
+        inputs = toxicity_tokenizer(text, return_tensors='pt', truncation=True, padding=True).to(toxicity_model.device)
+        proba = torch.sigmoid(toxicity_model(**inputs).logits).cpu().numpy()
 
     if isinstance(text, str):
         proba = proba[0]
@@ -35,7 +35,6 @@ def text2toxicity(text, aggregate=True):
     if aggregate:
         return 1 - proba.T[0] * (1 - proba.T[-1])
     else:
-        # Добавленный блок кода
         result = {}
         for label, prob in zip(labels, proba):
             result[label] = prob
@@ -112,7 +111,7 @@ def page_toxicity_analysis():
     elapsed_time = time.time() - start_time
 
     for label, prob in probs.items():
-        st.write(f"Вероятность {label}: {prob:.4f}")
+        st.write(f"Вероятность того что комментарий {label}: {prob:.4f}")
 
 
 def main():
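
For context, the two `+` lines in the first hunk follow the standard multi-label inference pattern: the tokenizer output is moved to the model's device, and a sigmoid over the classification logits yields one independent probability per toxicity aspect. Below is a minimal, self-contained sketch of the updated function as it could run outside the Space. The checkpoint name `cointegrated/rubert-tiny-toxicity` and the last three entries of `labels` are assumptions (the hunk header truncates the list, and the two removed lines are cut off in the rendered diff); the trailing `return result` is likewise assumed, since the hunk ends at the assignment.

import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Assumed checkpoint; the commit does not show which model the Space loads.
MODEL_NAME = "cointegrated/rubert-tiny-toxicity"
toxicity_tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
toxicity_model = AutoModelForSequenceClassification.from_pretrained(MODEL_NAME)

# First two labels come from the truncated hunk header; the rest are assumed.
# English: "non-toxic", "insulting", "obscene", "threatening", "dangerous".
labels = ["не токсичный", "оскорбляющий", "непристойный", "угрожающий", "опасный"]

def text2toxicity(text, aggregate=True):
    """Calculate toxicity of a text (if aggregate=True) or a vector of toxicity aspects (if aggregate=False)"""
    with torch.no_grad():
        inputs = toxicity_tokenizer(text, return_tensors='pt', truncation=True, padding=True).to(toxicity_model.device)
        # Multi-label head: an independent sigmoid per aspect, not a softmax.
        proba = torch.sigmoid(toxicity_model(**inputs).logits).cpu().numpy()
    if isinstance(text, str):
        proba = proba[0]  # single input -> single probability vector
    if aggregate:
        # Overall score: 1 - P(non-toxic) * (1 - P(last aspect)), i.e. the
        # text counts as toxic if it is not clean or if the last aspect fires.
        return 1 - proba.T[0] * (1 - proba.T[-1])
    result = {}
    for label, prob in zip(labels, proba):
        result[label] = prob
    return result  # assumed: the hunk ends at the assignment above

print(text2toxicity("Привет, мир!"))

As a worked example of the aggregate score: with P(не токсичный) = 0.9 and a last-aspect probability of 0.2, the call returns 1 − 0.9 × (1 − 0.2) = 0.28.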
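
The third hunk only rewords the per-label output ("Вероятность {label}" → "Вероятность того что комментарий {label}", i.e. "probability that the comment is {label}"). A hypothetical reconstruction of the surrounding page_toxicity_analysis(), in which the Streamlit scaffolding (input widget, button, the display of elapsed_time) is assumed rather than taken from the commit:

import time
import streamlit as st

def page_toxicity_analysis():
    # The widget labels and control flow here are assumptions; only the
    # timing line and the output loop appear in the diff context.
    text = st.text_area("Введите комментарий:")  # "Enter a comment:"
    if st.button("Проверить") and text:          # "Check"
        start_time = time.time()
        probs = text2toxicity(text, aggregate=False)  # per-label dict
        elapsed_time = time.time() - start_time
        for label, prob in probs.items():
            # "Probability that the comment is {label}: ..."
            st.write(f"Вероятность того что комментарий {label}: {prob:.4f}")
        # Assumed use of elapsed_time; the diff only shows it being computed.
        st.write(f"Затраченное время: {elapsed_time:.2f} с.")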