Felladrin committed
Commit abae382 · verified · 1 Parent(s): 4d9f180

Update generation parameters for Llama-68M-Chat-v1 model

Files changed (1): app.py (+2 −0)
app.py CHANGED
@@ -24,6 +24,8 @@ def generate(
 
     if model_name == "Felladrin/Pythia-31M-Chat-v1":
        outputs = pipe(prompt, max_length=1024, use_cache=True, penalty_alpha=0.5, top_k=2, repetition_penalty=1.0016)
+   elif model_name == "Felladrin/Llama-68M-Chat-v1":
+       outputs = pipe(prompt, max_length=1024, use_cache=True, penalty_alpha=0.5, top_k=4, repetition_penalty=1.001)
    elif model_name == "Felladrin/Smol-Llama-101M-Chat-v1":
        outputs = pipe(prompt, max_length=1024, use_cache=True, penalty_alpha=0.5, top_k=4, repetition_penalty=1.105)
    elif model_name == "Felladrin/Llama-160M-Chat-v1":
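
For reference, a minimal standalone sketch of the code path this commit adds, assuming `pipe` is a Hugging Face transformers text-generation pipeline for the selected model; the pipeline setup and prompt construction live outside this hunk, so both are assumptions here and the prompt below is purely hypothetical.

from transformers import pipeline

# Assumption: app.py constructs the pipeline roughly like this for the selected model.
pipe = pipeline("text-generation", model="Felladrin/Llama-68M-Chat-v1")

# Hypothetical prompt; the real app assembles it from the chat history.
prompt = "User: Hello!\nAssistant:"

# Same parameters as the added branch: penalty_alpha + top_k enable contrastive
# search, and repetition_penalty=1.001 applies a very light repetition penalty.
outputs = pipe(
    prompt,
    max_length=1024,
    use_cache=True,
    penalty_alpha=0.5,
    top_k=4,
    repetition_penalty=1.001,
)
print(outputs[0]["generated_text"])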