Update README.md
README.md CHANGED

@@ -87,4 +87,4 @@ tokenizer = AutoTokenizer.from_pretrained("traintogpb-ke-t5-base-aihub-koen-tran
 
 inputs = tokenizer.encode("This is a sample text.", return_tensors="pt")
 outputs = model.generate(inputs)
-print(tokenizer.decode(outputs[0], skip_special_tokens=True))
+print(tokenizer.decode(outputs[0], skip_special_tokens=True))
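For context, a minimal sketch of the README usage this diff touches, assuming the standard `transformers` Auto classes; the repository id is truncated in the hunk header, so the `model_name` below is a hypothetical placeholder to substitute with the full id.

```python
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

# Hypothetical placeholder: the hunk header truncates the real repo id
# ("traintogpb-ke-t5-base-aihub-koen-tran..."), so fill in the full one.
model_name = "<ke-t5-repo-id>"

tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)

# Encode the source text, generate the model output, and decode it,
# mirroring the lines shown in the diff above.
inputs = tokenizer.encode("This is a sample text.", return_tensors="pt")
outputs = model.generate(inputs)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```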