Spaces:
Running
Running
Available temperature with do_sample
Browse files
app.py
CHANGED
@@ -55,7 +55,8 @@ def predict(prompt, model_select, max_length, repetition_penalty, temperature):
|
|
55 |
max_length=prompt_length + max_length,
|
56 |
pad_token_id=generator_model.tokenizer.eos_token_id,
|
57 |
repetition_penalty=repetition_penalty,
|
58 |
-
temperature=temperature
|
|
|
59 |
|
60 |
generated_sequence = generated_text[0]['generated_text']
|
61 |
if generated_sequence is None:
|
|
|
55 |
max_length=prompt_length + max_length,
|
56 |
pad_token_id=generator_model.tokenizer.eos_token_id,
|
57 |
repetition_penalty=repetition_penalty,
|
58 |
+
temperature=temperature,
|
59 |
+
do_sample=True)
|
60 |
|
61 |
generated_sequence = generated_text[0]['generated_text']
|
62 |
if generated_sequence is None:
|