MikeJeong committed on
Commit fa988c2
1 Parent(s): 0695e07

Update app.py

Files changed (1):
  1. app.py (+2 -26)
app.py CHANGED
@@ -40,33 +40,9 @@ if text and option:
     # for example, if toxic = 1, then we can say the tweet is toxic, if threat is 0, then we can say there is no threat.
     # if the value given by the prediction is above threshold, we put 1, 0 otherwise.
     with col2:
-        dd = {
-            "category": labels,
-            "values": vals
-        }
         st.header("Toxicity class")
-        #out = pipe(text)
-        thresh = 0.2
-        cate_d = dict()
-        cate_d["category"] = labels
-        cate_d["values"] = []
-        for i in range(len(labels)):
-            if values[i] > thresh:
-                cate_d["values"].append(1)
-            else:
-                cate_d["values"].append(0)
-        df2 = pd.DataFrame(
-            data=cate_d
-        ).sort_values(by=['values'], ascending=False)
-        st.write(df2)
+        st.write(dd)
     # in the third and last collumn, we display the probability of each category, sorted in descending order
     with col3:
-        dd = {
-            "category": labels,
-            "values": vals
-        }
         st.header("Probability")
-        df3 = pd.DataFrame(
-            data=dd
-        ).sort_values(by=['values'], ascending=False)
-        st.write(df3)
+        st.write(dd)
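
For context, this commit drops the per-column DataFrame construction (the 0.2-threshold 0/1 table in col2 and the score-sorted table in col3) and writes the shared dd dict directly in both columns. Below is a minimal runnable sketch of the resulting layout; the label names and score values are placeholders, and the input widgets and column setup outside the hunk are assumptions, not taken from the full app.py.

# Minimal sketch of the Streamlit layout after this commit (assumed context).
# labels/vals are placeholders; in app.py they presumably come from the toxicity
# pipeline (the removed code references "out = pipe(text)" and a `values` list).
import streamlit as st

# Assumption: the usual Jigsaw toxic-comment categories; not verified against app.py.
labels = ["toxic", "severe_toxic", "obscene", "threat", "insult", "identity_hate"]
vals = [0.91, 0.12, 0.45, 0.03, 0.30, 0.02]  # placeholder scores, not real model output

text = st.text_input("Tweet to analyse")        # hypothetical widgets standing in for
option = st.selectbox("Model", ["toxic-bert"])  # whatever app.py defines above line 40

if text and option:
    dd = {"category": labels, "values": vals}   # the shared dict both columns now display

    col1, col2, col3 = st.columns(3)            # only col2/col3 are touched by this commit
    with col2:
        st.header("Toxicity class")
        st.write(dd)   # was: a 0/1 DataFrame (df2) built with a 0.2 threshold
    with col3:
        st.header("Probability")
        st.write(dd)   # was: a DataFrame (df3) sorted by score, descending

Note that st.write on a plain dict renders it as an expandable JSON-style object rather than a table, so the pd.DataFrame / sort_values steps are no longer needed once the raw dd dict is shown directly.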