Matt C committed on
Commit 9a33247
1 Parent(s): 7186326
Files changed (1)
  1. app.py +9 -5
app.py CHANGED
@@ -1,6 +1,8 @@
  import streamlit as st
- import torch
  import plotly.express as px
+ import torch
+
+ from torch import nn
  from transformers import AutoTokenizer, AutoModelForSequenceClassification

  deftxt = "I hate you cancerous insects so much"
@@ -9,13 +11,15 @@ txt = st.text_area('Text to analyze', deftxt)
  # load tokenizer and model weights
  tokenizer = AutoTokenizer.from_pretrained("s-nlp/roberta_toxicity_classifier")
  model = AutoModelForSequenceClassification.from_pretrained("s-nlp/roberta_toxicity_classifier")
-
- # prepare the input
  batch = tokenizer.encode(txt, return_tensors='pt')

- # inference
+ # e.g. "logits":"tensor([[ 4.8982, -5.1952]], grad_fn=<AddmmBackward0>)"
  result = model(batch)
- result
+
+ # get probabilities
+ prediction = nn.functional.softmax(result.logits, dim=-1)
+
+ print(prediction)

  #fig = px.bar(result, x="", y="", orientation='h')
  #fig.show()
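
For reference, here is one way the still-commented-out px.bar / fig.show lines at the end of app.py could eventually consume the new softmax output. This is a minimal sketch, not part of this commit: it assumes the class names are read from model.config.id2label (for this checkpoint they are expected to resolve to "neutral" and "toxic"), wraps inference in torch.no_grad(), and renders the chart with st.plotly_chart() rather than fig.show(), which would open the figure outside the Streamlit app.

import streamlit as st
import plotly.express as px
import torch
from torch import nn
from transformers import AutoTokenizer, AutoModelForSequenceClassification

deftxt = "I hate you cancerous insects so much"
txt = st.text_area('Text to analyze', deftxt)

# load tokenizer and model weights
tokenizer = AutoTokenizer.from_pretrained("s-nlp/roberta_toxicity_classifier")
model = AutoModelForSequenceClassification.from_pretrained("s-nlp/roberta_toxicity_classifier")

batch = tokenizer.encode(txt, return_tensors='pt')

# no gradients are needed at inference time (not in the commit; added for the sketch)
with torch.no_grad():
    result = model(batch)

# get probabilities, as in the commit
prediction = nn.functional.softmax(result.logits, dim=-1)

# class names taken from the model config instead of being hard-coded
labels = [model.config.id2label[i] for i in range(prediction.shape[-1])]
probs = prediction[0].tolist()

# horizontal bar chart of the class probabilities, rendered inside the Streamlit app
fig = px.bar(x=probs, y=labels, orientation='h',
             labels={'x': 'probability', 'y': 'class'})
st.plotly_chart(fig)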