pcuenq HF staff committed on
Commit
7318f57
1 Parent(s): 31c147a

Use both eos tokens

Browse files
Files changed (1) hide show
  1. app.py +1 -0
app.py CHANGED
@@ -84,6 +84,7 @@ def chat_llama3_8b(message: str,
84
  max_new_tokens=max_new_tokens,
85
  do_sample=True,
86
  temperature=temperature,
 
87
  )
88
  # This will enforce greedy generation (do_sample=False) when the temperature is passed 0, avoiding the crash.
89
  if temperature == 0:
 
84
  max_new_tokens=max_new_tokens,
85
  do_sample=True,
86
  temperature=temperature,
87
+ eos_token_id=[128001, 128009],
88
  )
89
  # This will enforce greedy generation (do_sample=False) when the temperature is passed 0, avoiding the crash.
90
  if temperature == 0: