strangekitten committed on
Commit
0666c2a
1 Parent(s): 5fb16da

Update utils.py

Files changed (1)
  1. utils.py +1 -3
utils.py CHANGED
@@ -10,7 +10,6 @@ def get_most_probability_terms(logits, top_k=5):
     _, indices = torch.sort(logits, dim=1, descending=True)
     return indices[:, :top_k]
 
-@st.cache(suppress_st_warning=True)
 def predict(text, model, tokenizer, my_linear, top_k=10):
     tokens = tokenizer.encode(text)
     with torch.no_grad():
@@ -19,8 +18,7 @@ def predict(text, model, tokenizer, my_linear, top_k=10):
     return np.array(classes)[get_most_probability_terms(logits, top_k).cpu().numpy()][0]
 
 
-@st.cache(suppress_st_warning=True)
-def get_answer_with_desc(text, model, tokenizer, my_linear, top_k=3):
+def get_answer_with_desc(text, model, tokenizer, my_linear, top_k=10):
     codes = predict(text, model, tokenizer, my_linear)
     answer = ["Possible text topics:"]
     for code in codes[:top_k]:
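
For reference, a minimal standalone sketch of the get_most_probability_terms helper touched by the first hunk, run on a dummy logits tensor (the tensor values and the print call are illustrative only, not taken from the app):

# Minimal sketch, assuming torch is available; the dummy logits below are illustrative.
import torch

def get_most_probability_terms(logits, top_k=5):
    # Sort each row of logits in descending order and keep the indices
    # of the top_k highest-scoring classes.
    _, indices = torch.sort(logits, dim=1, descending=True)
    return indices[:, :top_k]

logits = torch.tensor([[0.1, 2.5, -0.3, 1.7]])
print(get_most_probability_terms(logits, top_k=2))  # tensor([[1, 3]])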