ggbetz committed on
Commit
ceb42dc
1 Parent(s): edf0350

Update app.

Browse files
Files changed (1) hide show
  1. app.py +3 -6
app.py CHANGED
@@ -64,7 +64,8 @@ GEN_CHAINS = [
64
 
65
  INFERENCE_PARAMS = {
66
  'max_length':450,
67
- 'clean_up_tokenization_spaces': False
 
68
  }
69
 
70
  HTML_WRAPPER = """<div style="overflow-x: auto; border: 1px solid #e6e9ef; border-radius: 0.25rem; padding: 1rem; margin-bottom: 2.5rem">{}</div>"""
@@ -261,11 +262,7 @@ def run_model(mode_set, user_input):
261
  inquire_prompt = inquire_prompt + (f"{to_key}: {from_key}: {current_input[from_key]}")
262
  # inquire model
263
  inputs = inquire_prompt
264
- out = inference(
265
- inputs,
266
- parameters=INFERENCE_PARAMS,
267
- options={'use_cache':False}
268
- )
269
  out = out[0]['generated_text']
270
  # cleanup formalization
271
  if to_key in ['premises_formalized','conclusion_formalized']:
 
64
 
65
  INFERENCE_PARAMS = {
66
  'max_length':450,
67
+ 'clean_up_tokenization_spaces': False,
68
+ 'use_cache':False
69
  }
70
 
71
  HTML_WRAPPER = """<div style="overflow-x: auto; border: 1px solid #e6e9ef; border-radius: 0.25rem; padding: 1rem; margin-bottom: 2.5rem">{}</div>"""
 
262
  inquire_prompt = inquire_prompt + (f"{to_key}: {from_key}: {current_input[from_key]}")
263
  # inquire model
264
  inputs = inquire_prompt
265
+ out = inference(inputs,INFERENCE_PARAMS)
 
 
 
 
266
  out = out[0]['generated_text']
267
  # cleanup formalization
268
  if to_key in ['premises_formalized','conclusion_formalized']: