andreidima
committed on
Update README.md
Browse files
README.md
CHANGED
@@ -36,7 +36,7 @@ input_ids = tokenizer(input_text, return_tensors="pt")
|
|
36 |
outputs = model.generate(
|
37 |
**input_ids,
|
38 |
max_new_tokens=100,
|
39 |
-
eos_token_id
|
40 |
)
|
41 |
print(tokenizer.decode(outputs[0]))
|
42 |
```
|
|
|
36 |
outputs = model.generate(
|
37 |
**input_ids,
|
38 |
max_new_tokens=100,
|
39 |
+
eos_token_id=[13] # 13 is the token ID for a newline character at the end of a non-empty line
|
40 |
)
|
41 |
print(tokenizer.decode(outputs[0]))
|
42 |
```
|