No need for overriding decoder start token anymore
README.md CHANGED
@@ -443,11 +443,7 @@ tokenizer = T5Tokenizer.from_pretrained('jbochi/madlad400-3b-mt')
 
 text = "<2pt> I love pizza!"
 input_ids = tokenizer(text, return_tensors="pt").input_ids
-outputs = model.generate(
-    input_ids=input_ids,
-    generation_config=GenerationConfig(
-        decoder_start_token_id=0,
-    ))
+outputs = model.generate(input_ids=input_ids)
 
 tokenizer.decode(outputs[0], skip_special_tokens=True)
 # Eu adoro pizza!
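For reference, the updated README example reads end to end roughly as the sketch below. The model-loading line and the print call are assumptions added to make it self-contained; only the tokenizer line is visible in the hunk header. The point of the change is that the decoder start token now comes from the model's own generation config, so no explicit GenerationConfig override is needed.

from transformers import T5ForConditionalGeneration, T5Tokenizer

model_name = 'jbochi/madlad400-3b-mt'

# Model class assumed here; the hunk header only shows the tokenizer line.
model = T5ForConditionalGeneration.from_pretrained(model_name)
tokenizer = T5Tokenizer.from_pretrained(model_name)

# "<2pt>" is the target-language tag (Portuguese) used by MADLAD-400.
text = "<2pt> I love pizza!"
input_ids = tokenizer(text, return_tensors="pt").input_ids

# decoder_start_token_id is picked up from the model's generation config,
# so generate() can be called without overriding it.
outputs = model.generate(input_ids=input_ids)

print(tokenizer.decode(outputs[0], skip_special_tokens=True))
# Eu adoro pizza!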