AleksBlacky committed
Commit 66700ad
1 Parent(s): 739c6cd

update app.py

Files changed (1)
app.py +3 -29
app.py CHANGED
@@ -7,9 +7,8 @@ from transformers import AutoTokenizer, AutoModelForSequenceClassification
 
 st.markdown("# Hello, friend!")
 st.markdown(" This magic application going to help you with understanding of science paper topic! Cool? Yeah! ")
-# st.markdown("<img width=200px src='https://rozetked.me/images/uploads/dwoilp3BVjlE.jpg'>", unsafe_allow_html=True)
 
-st.write("Loading tokenizer and dict")
+# st.write("Loading tokenizer and dict")
 model_name_global = "allenai/scibert_scivocab_uncased"
 tokenizer_ = AutoTokenizer.from_pretrained(model_name_global)
 with open('./models/scibert/decode_dict.pkl', 'rb') as f:
@@ -18,32 +17,7 @@ with open('./models/scibert/decode_dict.pkl', 'rb') as f:
 with st.form(key="my_form"):
     st.markdown("### 🎈 Do you want a little magic? ")
     st.markdown(" Write your article title and abstract to textboxes bellow and I'll gues topic of your paper! ")
-    # ce, c1, ce, c2, c3 = st.columns([0.07, 1, 0.07, 5, 0.07])
-    ce, c2, c3 = st.columns([0.07, 5, 0.07])
-    # with c1:
-    #     ModelType = st.radio(
-    #         "Choose your model",
-    #         ["DistilBERT (Default)", "Flair"],
-    #         help="At present, you can choose between 2 models (Flair or DistilBERT) to embed your text. More to come!",
-    #     )
-    #
-    #     if ModelType == "Default (DistilBERT)":
-    #         # kw_model = KeyBERT(model=roberta)
-    #
-    #         @st.cache(allow_output_mutation=True)
-    #         def load_model():
-    #             return KeyBERT(model=roberta)
-    #
-    #
-    #         kw_model = load_model()
-    #
-    #     else:
-    #         @st.cache(allow_output_mutation=True)
-    #         def load_model():
-    #             return KeyBERT("distilbert-base-nli-mean-tokens")
-    #
-    #
-    #         kw_model = load_model()
+    ce, c2, c3 = st.columns([0.07, 6, 0.07])
 
     with c2:
         doc_title = st.text_area(
@@ -115,7 +89,7 @@ title = doc_title
 abstract = doc_abstract
 tokens = tokenizer_(title + abstract, return_tensors="pt")
 
-predicts = make_predict(model_name_global, model_local, tokens, decode_dict, title, abstract)
+predicts = make_predict(tokens, decode_dict)
 
 st.markdown("## 🎈 Yor article probably about: ")
 st.header("")
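
The simplified call site drops `model_name_global`, `model_local`, `title`, and `abstract`, so `make_predict` now only needs the tokenized input and the label-decoding dictionary. A minimal sketch of what such a helper could look like is shown below; the function body, the local checkpoint path, and the softmax handling are assumptions for illustration, not code from this commit.

```python
import torch
from transformers import AutoModelForSequenceClassification

def make_predict(tokens, decode_dict, checkpoint="./models/scibert"):
    # Hypothetical helper matching the new two-argument call site:
    #     predicts = make_predict(tokens, decode_dict)
    # The checkpoint path and ranking details are assumptions.
    model = AutoModelForSequenceClassification.from_pretrained(checkpoint)
    model.eval()
    with torch.no_grad():
        logits = model(**tokens).logits          # shape: (1, num_labels)
    probs = torch.softmax(logits, dim=-1).squeeze(0)
    order = torch.argsort(probs, descending=True)
    # Map class indices back to human-readable topic names via decode_dict
    return [(decode_dict[int(i)], float(probs[i])) for i in order]
```

Loading the fine-tuned model inside (or via a cached loader next to) the helper is one way to keep the Streamlit call site down to the two arguments seen in the diff.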