Update app.py
app.py CHANGED
@@ -160,9 +160,11 @@ if prompt := st.chat_input(initial_input):
            end_t = time.time()
            if user_timer == "Yes":
                st.success(f"Running LLM. | Time: {end_t - begin_t} sec")
+           did_this_llm_run = "yes"
        except:
            st.warning("Sorry, the inference endpoint is temporarily down.")
            llm_response = "NA."
+           did_this_llm_run = "no"
    else:
        st.warning(
            "Apologies! We are in the process of fine-tuning the model, so it's currently unavailable. ⚠️"
@@ -170,7 +172,10 @@ if prompt := st.chat_input(initial_input):
        llm_response = "NA"

    finetuned_llm_guess = ["from_llm", question, llm_response, 0]
-   final_ref.loc[-1] = finetuned_llm_guess
+   if did_this_llm_run == "no":
+       st.warning("Fine-tuned LLM not used in this call.")
+   else:
+       final_ref.loc[-1] = finetuned_llm_guess
    final_ref = final_ref.reset_index()

    # add ai judge as additional rating
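Net effect of the commit: the try block that calls the fine-tuned endpoint now records whether it succeeded in a did_this_llm_run flag, and the append of finetuned_llm_guess into final_ref is gated on that flag. Below is a minimal, runnable sketch of that control flow, not the Space's actual code: the final_ref column names, the question placeholder, and call_finetuned_endpoint / use_finetuned_llm are assumptions standing in for pieces of app.py that the diff does not show.

import time

import pandas as pd
import streamlit as st

# Assumed four-column schema matching the ["from_llm", question, llm_response, 0] rows.
final_ref = pd.DataFrame(columns=["source", "question", "answer", "rating"])


def call_finetuned_endpoint(prompt: str) -> str:
    # Hypothetical stand-in for the Space's real inference call.
    return "stubbed fine-tuned answer"


question = "example user question"   # placeholder for the chat input
user_timer = "Yes"                   # mirrors the diff's timing toggle
use_finetuned_llm = True             # hypothetical stand-in for the model-availability check
did_this_llm_run = "no"              # default so the later comparison never hits an unset name

if use_finetuned_llm:
    begin_t = time.time()
    try:
        llm_response = call_finetuned_endpoint(question)
        end_t = time.time()
        if user_timer == "Yes":
            st.success(f"Running LLM. | Time: {end_t - begin_t} sec")
        did_this_llm_run = "yes"     # only reached when the endpoint call succeeded
    except Exception:
        st.warning("Sorry, the inference endpoint is temporarily down.")
        llm_response = "NA."
        did_this_llm_run = "no"
else:
    st.warning("Apologies! We are in the process of fine-tuning the model, so it's currently unavailable.")
    llm_response = "NA"

finetuned_llm_guess = ["from_llm", question, llm_response, 0]
if did_this_llm_run == "no":
    st.warning("Fine-tuned LLM not used in this call.")
else:
    final_ref.loc[-1] = finetuned_llm_guess   # append at index -1; reset_index() renumbers it
final_ref = final_ref.reset_index()

One design note on the sketch: initializing did_this_llm_run before the branches (which the diff itself does not show) keeps the later comparison safe when the model-unavailable branch runs; a plain boolean would work equally well, but the sketch keeps the "yes"/"no" strings used in the commit.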