winglian committed on
Commit
f605ae7
1 Parent(s): f111d2b

strip redundant intermediate var

Files changed (1)
  1. app.py +4 -6
app.py CHANGED
@@ -331,9 +331,8 @@ def open_chat(model_name, history, system_msg, max_new_tokens, temperature, top_
     model_res = model(messages, config=config) # type: Generator[List[Dict[str, str]], None, None]
     for res in model_res:
         tokens = re.findall(r'\s*\S+\s*', res[0]['generated_text'])
-        for s in tokens:
-            answer = s
-            history[-1][1] += answer
+        for subtoken in tokens:
+            history[-1][1] += subtoken
             # stream the response
             yield history, history, ""
             sleep(0.01)
@@ -359,9 +358,8 @@ def open_rp_chat(model_name, history, system_msg, max_new_tokens, temperature, t
     model_res = model(messages, config=config) # type: Generator[List[Dict[str, str]], None, None]
     for res in model_res:
         tokens = re.findall(r'\s*\S+\s*', res[0]['generated_text'])
-        for s in tokens:
-            answer = s
-            history[-1][1] += answer
+        for subtoken in tokens:
+            history[-1][1] += subtoken
             # stream the response
             yield history, history, ""
             sleep(0.01)
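
For context, the change drops the intermediate `answer` variable and appends each regex-matched chunk straight onto the last chat turn. Below is a minimal standalone sketch of the same streaming pattern; `fake_model` is a hypothetical stub standing in for the real `model(messages, config=config)` pipeline and is not part of app.py, it only mimics the `[{'generated_text': ...}]` output shape the loop consumes.

```python
import re
from time import sleep


def fake_model(messages):
    # Hypothetical stand-in for model(messages, config=config): yields one
    # result in the same [{'generated_text': ...}] shape the app expects.
    yield [{"generated_text": "Hello there, how can I help you today?"}]


def stream_chat(history, messages):
    # history[-1] is the current [user_msg, assistant_msg] pair; the assistant
    # side starts empty and grows chunk by chunk, as in the commit.
    for res in fake_model(messages):
        # Split the generation into whitespace-delimited chunks.
        tokens = re.findall(r'\s*\S+\s*', res[0]['generated_text'])
        for subtoken in tokens:
            history[-1][1] += subtoken  # append directly, no intermediate var
            yield history               # stream the partial response
            sleep(0.01)


history = [["Hi!", ""]]
for _ in stream_chat(history, messages=[{"role": "user", "content": "Hi!"}]):
    pass
print(history[-1][1])  # -> "Hello there, how can I help you today?"
```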