Roger Condori committed
Commit
33e49c0
1 Parent(s): a263964

app.py correct limit hf

Files changed (1)
app.py  +5 -5
app.py CHANGED
@@ -81,6 +81,11 @@ def convert():
 DOC_DB_LIMIT = 10
 MAX_NEW_TOKENS = 2048
 
+# Limit in HF, no need to set it
+if "SET_LIMIT" == os.getenv("DEMO"):
+    DOC_DB_LIMIT = 4
+    MAX_NEW_TOKENS = 32
+
 with gr.Blocks(theme=theme, css=css) as demo:
     with gr.Tab("Chat"):
 
@@ -122,10 +127,5 @@ with gr.Blocks(theme=theme, css=css) as demo:
     change_model_button.click(dc.change_llm,[repo_, file_, max_tokens, temperature, top_p, top_k, repeat_penalty, max_docs],[model_verify])
 
     falcon_button.click(dc.default_falcon_model, [], [model_verify])
-
-# limit in HF, no need to set it
-if "SET_LIMIT" == os.getenv("DEMO"):
-    DOC_DB_LIMIT = 4
-    MAX_NEW_TOKENS = 32
 
 demo.launch(enable_queue=True)
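For context, a minimal sketch (not the actual app.py; the slider definitions below are hypothetical) of why the override plausibly has to run before the gr.Blocks block: if MAX_NEW_TOKENS and DOC_DB_LIMIT feed component defaults such as the max_tokens and max_docs inputs referenced in the diff, those values are captured when the interface is built, so setting the limits after the block, as the old code did, would leave the UI at the unrestricted defaults.

import os
import gradio as gr

DOC_DB_LIMIT = 10
MAX_NEW_TOKENS = 2048

# Override the limits before the interface is constructed, mirroring the commit.
if "SET_LIMIT" == os.getenv("DEMO"):
    DOC_DB_LIMIT = 4
    MAX_NEW_TOKENS = 32

with gr.Blocks() as demo:
    # Hypothetical components: the real app wires max_tokens and max_docs into
    # dc.change_llm. The point is that they read the constants at build time.
    max_tokens = gr.Slider(1, MAX_NEW_TOKENS, value=MAX_NEW_TOKENS, label="Max new tokens")
    max_docs = gr.Slider(1, DOC_DB_LIMIT, value=DOC_DB_LIMIT, label="Max docs to retrieve")

demo.launch()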