ubermenchh committed
Commit 3031245
Parent: bfbec74

Update app.py

Files changed (1)
  1. app.py +1 -1
app.py CHANGED
@@ -40,7 +40,7 @@ def run(message, chat_history, system_prompt, max_new_tokens=1024, temperature=0
     stream = client.generate_stream(prompt, **generate_kwargs)
     output = ''
     for response in stream:
-        if any([end_token in response.token_text for end_token in [EOS_STRING, EOT_STRING]]):
+        if any([end_token in response.token.text for end_token in [EOS_STRING, EOT_STRING]]):
             return output
         else:
             output += response.token.text
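For context, a minimal sketch of what the corrected loop does, assuming the `text_generation` streaming client implied by `client.generate_stream` in the diff: each streamed response exposes its generated text at `response.token.text` (as the unchanged `output += response.token.text` line already does), whereas the old condition read a non-existent `token_text` attribute. The endpoint URL, the stop-string values, and the simplified `run` signature below are illustrative assumptions, not code taken from app.py.

```python
# Minimal sketch of the corrected streaming loop, assuming the text_generation
# client used by this Space. The endpoint URL and the EOS_STRING / EOT_STRING
# values are illustrative placeholders, not values from app.py.
from text_generation import Client

EOS_STRING = "</s>"   # assumed end-of-sequence marker
EOT_STRING = "<EOT>"  # assumed end-of-turn marker

client = Client("http://127.0.0.1:8080")  # hypothetical TGI endpoint


def run(prompt: str, **generate_kwargs) -> str:
    stream = client.generate_stream(prompt, **generate_kwargs)
    output = ''
    for response in stream:
        # Each streamed response carries the generated piece at response.token.text;
        # the pre-fix code checked a non-existent `token_text` attribute, which
        # raises AttributeError on the first streamed token instead of stopping
        # generation when an end token appears.
        if any(end_token in response.token.text for end_token in [EOS_STRING, EOT_STRING]):
            return output
        output += response.token.text
    return output
```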