3v324v23 committed on
Commit eee4cb3
1 Parent(s): 2420d62
Files changed (1)
  1. request_llm/bridge_tgui.py +3 -3
request_llm/bridge_tgui.py CHANGED
@@ -26,7 +26,7 @@ def random_hash():
 
 async def run(context):
     params = {
-        'max_new_tokens': 1024,
+        'max_new_tokens': 512,
         'do_sample': True,
         'temperature': 0.5,
         'top_p': 0.9,
@@ -39,7 +39,7 @@ async def run(context):
         'num_beams': 1,
         'penalty_alpha': 0,
         'length_penalty': 1,
-        'early_stopping': False,
+        'early_stopping': True,
         'seed': -1,
     }
     session = random_hash()
@@ -144,7 +144,7 @@ def predict_tgui_no_ui(inputs, top_p, temperature, history=[], sys_prompt=""):
     raw_input = "What I would like to say is the following: " + inputs
     prompt = inputs
     tgui_say = ""
-    mutable = [""]
+    mutable = ["", time.time()]
     def run_coorotine(mutable):
         async def get_result(mutable):
             async for response in run(prompt):
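
Note: taken together, the three changes trade output length for responsiveness: max_new_tokens is halved to 512, early_stopping lets generation terminate as soon as an end condition is met, and the shared mutable list gains a second slot holding a timestamp. A plausible reading of that last change is a watchdog pattern: the streaming coroutine refreshes the timestamp whenever a new chunk arrives, and a monitor on the other side of the thread boundary gives up if the stream goes quiet. A minimal self-contained sketch of that pattern follows; the names WATCHDOG_TIMEOUT and worker are illustrative and are not taken from bridge_tgui.py.

import threading
import time

# Hypothetical name for illustration only (not from bridge_tgui.py):
WATCHDOG_TIMEOUT = 15  # seconds of silence before the stream is considered stalled

# Same shape as the patched line: mutable[0] accumulates streamed text,
# mutable[1] records the last time the producer made progress.
mutable = ["", time.time()]

def worker(mutable):
    # Stand-in for the async TGUI stream: append chunks, refresh the clock.
    for chunk in ["Hello", ", ", "world"]:
        time.sleep(1)
        mutable[0] += chunk
        mutable[1] = time.time()

thread = threading.Thread(target=worker, args=(mutable,), daemon=True)
thread.start()

# Watchdog loop: poll the shared list and bail out if the producer goes quiet.
while thread.is_alive():
    time.sleep(1)
    if time.time() - mutable[1] > WATCHDOG_TIMEOUT:
        print("[Timeout] no new tokens received, giving up.")
        break

print(mutable[0])

Keeping the timestamp in the same list that carries the partial text means a single object crosses the thread boundary, which matches how run_coorotine already receives mutable.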