JacopoAbate committed on
Commit 57e1005
1 Parent(s): 9373da9

Update README.md

Files changed (1)
  1. README.md +2 -2
README.md CHANGED

@@ -38,7 +38,7 @@ pip install transformers torch sentencepiece
  ```python
  from transformers import AutoModelForCausalLM, AutoTokenizer
 
- device = "cuda" # change to cpu if you have no gpu
+ device = "cpu" # change to cuda if you want to use the gpu and have cuda installed
 
  model = AutoModelForCausalLM.from_pretrained("MoxoffSpA/Azzurro")
  tokenizer = AutoTokenizer.from_pretrained("MoxoffSpA/Azzurro")
@@ -59,7 +59,7 @@ encodeds = tokenizer.apply_chat_template(messages, return_tensors="pt")
  model_inputs = encodeds.to(device)
  model.to(device)
 
- generated_ids = model.generate(model_inputs, max_new_tokens=250, do_sample=True)
+ generated_ids = model.generate(model_inputs, max_new_tokens=256, do_sample=True)
  decoded = tokenizer.batch_decode(generated_ids)
  print(decoded[0])
  ```
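
For reference, below is a minimal end-to-end sketch of how the README snippet reads after this commit, assuming the standard transformers chat-template API. The `messages` prompt and the automatic device selection via `torch.cuda.is_available()` are illustrative additions, not part of the committed README, which hard-codes `device = "cpu"`.

```python
# Minimal sketch of the updated snippet; prompt content is illustrative.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Pick the device automatically; the committed README defaults to "cpu".
device = "cuda" if torch.cuda.is_available() else "cpu"

model = AutoModelForCausalLM.from_pretrained("MoxoffSpA/Azzurro")
tokenizer = AutoTokenizer.from_pretrained("MoxoffSpA/Azzurro")

# Example prompt (assumption: the actual README prompt is not shown in this diff).
messages = [
    {"role": "user", "content": "Qual è la capitale dell'Italia?"},
]

# Build the prompt with the model's chat template and move inputs and model to the device.
encodeds = tokenizer.apply_chat_template(messages, return_tensors="pt")
model_inputs = encodeds.to(device)
model.to(device)

# Sample up to 256 new tokens, matching the value introduced by this commit.
generated_ids = model.generate(model_inputs, max_new_tokens=256, do_sample=True)
decoded = tokenizer.batch_decode(generated_ids)
print(decoded[0])
```

Selecting the device with `torch.cuda.is_available()` avoids hand-editing the `device` string, which is what the comment in the committed line otherwise asks the reader to do.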