Poe Dator committed on
Commit
35bd6d3
1 Parent(s): 2ffe758

caching added

Browse files
Files changed (1)
  1. app.py +16 -13
app.py CHANGED
@@ -6,13 +6,6 @@ from time import time
 # device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 device = 'cpu'
 
-st.markdown("### Privet, mir!")
-st.markdown("<img width=200px src='https://i.pinimg.com/736x/11/33/19/113319f0ffe91f4bb0f468914b9916da.jpg'>", unsafe_allow_html=True)
-
-text = st.text_area("ENTER TEXT HERE")
-start_time = time()
-st.markdown("INFERENCE STARTS ...")
-
 # dict for decoding / enclding labels
 labels = {'cs.NE': 0, 'cs.CL': 1, 'cs.AI': 2, 'stat.ML': 3, 'cs.CV': 4, 'cs.LG': 5}
 labels_decoder = {'cs.NE': 'Neural and Evolutionary Computing', 'cs.CL': 'Computation and Language', 'cs.AI': 'Artificial Intelligence',
@@ -37,6 +30,7 @@ class BertClassifier(nn.Module):
         final_layer = self.relu(linear_output)
         return final_layer
 
+@st.cache(suppress_st_warning=True)
 def build_model():
     model = BertClassifier(n_classes=len(labels))
     st.markdown("Model created")
@@ -45,8 +39,6 @@ def build_model():
     st.markdown("Model weights loaded")
     return model
 
-model = build_model()
-
 def inference(txt, mode=None):
     # infers classes for text topic based on the trained model from above
     # has separate mode 'print' for just output
@@ -62,11 +54,22 @@ def inference(txt, mode=None):
     out = out/out.sum() * 100
     res = [(l, o) for l, o in zip (list(labels.keys()), out.tolist())]
     return res
-
-res = inference(text, mode=None)
-st.markdown("INFERENCE RESULT:")
+
+model = build_model()
+
+st.markdown("### Privet, mir!")
+st.markdown("<img width=200px src='https://i.pinimg.com/736x/11/33/19/113319f0ffe91f4bb0f468914b9916da.jpg'>", unsafe_allow_html=True)
+
+text = st.text_area("ENTER TEXT HERE")
+start_time = time()
+st.markdown("INFERENCE STARTS ...")
+
+
+res = inference(text, mode=None)
+res.sort(key = lambda x : - x[1])
+st.markdown("<b>INFERENCE RESULT:</b>")
 for lbl, score in res:
     if score >=1:
-        st.markdown(f"[{lbl:<7}] {labels_decoder[lbl]:<35} {score:.1f}%")
+        st.markdown(f"[ {lbl:<7}] {labels_decoder[lbl]:<35} {score:.1f}%")
 
 st.markdown(f"cycle time = {time() - start_time:.2f} s.")
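
For context on the change: Streamlit re-executes the whole script on every user interaction, so before this commit build_model() rebuilt the BertClassifier and reloaded its weights on each rerun. Decorating it with @st.cache(suppress_st_warning=True) memoizes the returned model and suppresses the warning about the st.markdown calls made inside the cached function; the script-level calls were also moved below the definitions so the cached builder is defined before it is used. A minimal, self-contained sketch of the pattern is below; the function body is a stand-in for the real model construction, and st.cache is the legacy API of the Streamlit version this Space targets (newer releases replace it with st.cache_resource):

import time
import streamlit as st

# Legacy caching decorator used in this commit; suppress_st_warning silences the
# warning about st.* calls inside a cached function. Newer Streamlit versions
# replace st.cache with st.cache_resource.
@st.cache(suppress_st_warning=True)
def build_model():
    st.markdown("Model created")   # only rendered on a cache miss
    time.sleep(5)                  # stand-in for building BertClassifier and loading weights
    return {"weights": "loaded"}   # stand-in for the real nn.Module

model = build_model()  # slow on the first script run, served from cache on every rerun
st.write(model)

On the first run the body executes and the result is stored; on subsequent reruns (each text_area edit) the cached object is returned immediately, which is what the commit message "caching added" refers to.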