pablo-rf committed on
Commit
a434986
1 Parent(s): 6d6ce37

Available temperature with do_sample

Browse files
Files changed (1) hide show
  1. app.py +2 -1
app.py CHANGED
@@ -55,7 +55,8 @@ def predict(prompt, model_select, max_length, repetition_penalty, temperature):
55
  max_length=prompt_length + max_length,
56
  pad_token_id=generator_model.tokenizer.eos_token_id,
57
  repetition_penalty=repetition_penalty,
58
- temperature=temperature)
 
59
 
60
  generated_sequence = generated_text[0]['generated_text']
61
  if generated_sequence is None:
 
55
  max_length=prompt_length + max_length,
56
  pad_token_id=generator_model.tokenizer.eos_token_id,
57
  repetition_penalty=repetition_penalty,
58
+ temperature=temperature,
59
+ do_sample=True)
60
 
61
  generated_sequence = generated_text[0]['generated_text']
62
  if generated_sequence is None: