Spaces:
Runtime error
Runtime error
aidan-o-brien
committed on
Commit
•
1f6bd67
1
Parent(s):
15ccfd9
added another model - model selection in sidebar
Browse files
app.py
CHANGED
@@ -12,11 +12,24 @@ def load_data(file):
|
|
12 |
return df
|
13 |
|
14 |
|
15 |
-
#@st.cache # tokenizer cannot be cached
|
16 |
def load_pipeline(model_cp, tokenizer_cp):
|
17 |
return pipeline("question-answering", model=model_cp, tokenizer=tokenizer_cp)
|
18 |
|
19 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
20 |
# Page config
|
21 |
title = "Recipe Improver"
|
22 |
icon = "🍣"
|
@@ -24,9 +37,8 @@ st.set_page_config(page_title=title, page_icon=icon)
|
|
24 |
st.title(title)
|
25 |
|
26 |
|
27 |
-
# Load
|
28 |
-
model_cp =
|
29 |
-
tokenizer_cp = "albert-base-v2"
|
30 |
question_answer = load_pipeline(model_cp, tokenizer_cp)
|
31 |
st.write("Model and tokenizer successfully loaded.")
|
32 |
|
@@ -39,7 +51,6 @@ with st.expander("Upload csv file"):
|
|
39 |
# If file is uploaded, run inference
|
40 |
if uploaded_file is not None:
|
41 |
df = load_data(uploaded_file)
|
42 |
-
st.write(df.head())
|
43 |
|
44 |
# Run inference on first example
|
45 |
first_example = df['review'][0]
|
|
|
12 |
return df
|
13 |
|
14 |
|
|
|
15 |
def load_pipeline(model_cp, tokenizer_cp):
|
16 |
return pipeline("question-answering", model=model_cp, tokenizer=tokenizer_cp)
|
17 |
|
18 |
|
19 |
+
@st.cache
|
20 |
+
def choose_model():
|
21 |
+
with st.sidebar:
|
22 |
+
st.write("# Model Selection")
|
23 |
+
model_cp = st.selectbox('Select model for inference',
|
24 |
+
('deepset/roberta-base-squad2',
|
25 |
+
'aidan-o-brien/recipe-improver'))
|
26 |
+
# If not my model, use model_cp itself as the tokenizer; else use the albert tokenizer
|
27 |
+
if model_cp == "aidan-o-brien/recipe-improver":
|
28 |
+
return model_cp, "albert-base-v2"
|
29 |
+
else:
|
30 |
+
return model_cp, model_cp
|
31 |
+
|
32 |
+
|
33 |
# Page config
|
34 |
title = "Recipe Improver"
|
35 |
icon = "🍣"
|
|
|
37 |
st.title(title)
|
38 |
|
39 |
|
40 |
+
# Load model and tokenizer
|
41 |
+
model_cp, tokenizer_cp = choose_model()
|
|
|
42 |
question_answer = load_pipeline(model_cp, tokenizer_cp)
|
43 |
st.write("Model and tokenizer successfully loaded.")
|
44 |
|
|
|
51 |
# If file is uploaded, run inference
|
52 |
if uploaded_file is not None:
|
53 |
df = load_data(uploaded_file)
|
|
|
54 |
|
55 |
# Run inference on first example
|
56 |
first_example = df['review'][0]
|