ggbetz committed

Commit 0716eb7
Parent(s): d38be7f

Catch API error

Files changed (1):
  1. app.py  +14 -1
app.py CHANGED

@@ -74,6 +74,8 @@ INFERENCE_PARAMS = {
     'use_cache':False
 }
 
+MAX_API_CALLS = 3
+
 HTML_WRAPPER = """<div style="overflow-x: auto; border: 1px solid #e6e9ef; border-radius: 0.25rem; padding: 1rem; margin-bottom: 2.5rem">{}</div>"""
 
 CACHE_SIZE = 10000
@@ -268,7 +270,18 @@ def run_model(mode_set, user_input):
         inquire_prompt = inquire_prompt + (f"{to_key}: {from_key}: {current_input[from_key]}")
         # inquire model
         inputs = inquire_prompt
-        out = inference(inputs,INFERENCE_PARAMS)
+        attempts = 0
+        out = None
+        while out==None or attempts<MAX_API_CALLS:
+            try:
+                # api call
+                out = inference(inputs,INFERENCE_PARAMS)
+            except Exception:
+                attempts += 1
+                if attempts<MAX_API_CALLS:
+                    st.warning(f"HF Inference API call (attempt {attempts} of {MAX_API_CALLS}) has failed. Trying again...")
+                else:
+                    raise
         out = out[0]['generated_text']
         # cleanup formalization
         if to_key in ['premises_formalized','conclusion_formalized']:
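The change wraps the single Inference API call in a bounded retry loop: on failure it warns the user via Streamlit and tries again, re-raising the exception once MAX_API_CALLS attempts are exhausted. Below is a minimal, self-contained sketch of that pattern, assuming `inference` and `INFERENCE_PARAMS` behave as in app.py; the helper name `inference_with_retries` and its signature are hypothetical and not part of the commit. One design detail: the committed loop condition (`out==None or attempts<MAX_API_CALLS`) remains true while attempts are left even after a response has arrived, so the sketch joins the two tests with `and` to exit as soon as a call succeeds.

# Minimal sketch of the bounded-retry pattern (assumed helper names, not part of the commit).
import streamlit as st

MAX_API_CALLS = 3  # mirrors the constant introduced in the commit

def inference_with_retries(inference, inputs, params, max_calls=MAX_API_CALLS):
    """Call `inference(inputs, params)` up to `max_calls` times, re-raising the last error."""
    attempts = 0
    out = None
    while out is None and attempts < max_calls:  # stop as soon as a response arrives
        try:
            out = inference(inputs, params)  # API call
        except Exception:
            attempts += 1
            if attempts < max_calls:
                st.warning(
                    f"HF Inference API call (attempt {attempts} of {max_calls}) has failed. Trying again..."
                )
            else:
                raise  # give up after the final failed attempt
    return out

# Hypothetical usage inside run_model:
#   out = inference_with_retries(inference, inquire_prompt, INFERENCE_PARAMS)
#   out = out[0]['generated_text']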