Spaces:
Runtime error
Runtime error
Include max_length in generation config
Browse files
app.py
CHANGED
@@ -19,7 +19,7 @@ def inference(max_length, input_text, history=[]):
Old version of the hunk:

    19     input_ids = tokenizer(input_text, return_tensors="pt").input_ids
    20     outputs = model.generate(
    21         input_ids=input_ids,
    22 -       generation_config=GenerationConfig(decoder_start_token_id=2),
    23     )
    24     result = tokenizer.decode(outputs[0], skip_special_tokens=True)
    25     history.append((input_text, result))
New version of the hunk:

    19     input_ids = tokenizer(input_text, return_tensors="pt").input_ids
    20     outputs = model.generate(
    21         input_ids=input_ids,
    22 +       generation_config=GenerationConfig(max_length=max_length, decoder_start_token_id=2),
    23     )
    24     result = tokenizer.decode(outputs[0], skip_special_tokens=True)
    25     history.append((input_text, result))