kajyuuen committed on
Commit
b6dc1b1
1 Parent(s): b216be9

Update README.md

Files changed (1): README.md (+2, -0)
README.md CHANGED
@@ -17,6 +17,8 @@ from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline, set_seed
 
 model = AutoModelForCausalLM.from_pretrained("sbintuitions/sarashina1-13b", torch_dtype=torch.float16, device_map="auto")
 tokenizer = AutoTokenizer.from_pretrained("sbintuitions/sarashina1-13b")
+# If you want to use slow tokenizer
+# tokenizer = AutoTokenizer.from_pretrained("sbintuitions/sarashina1-13b", use_fast=False, revision="slow-tokenizer")
 generator = pipeline("text-generation", model=model, tokenizer=tokenizer)
 set_seed(123)
 
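For context, a minimal end-to-end sketch of how the snippet touched by this commit might be run, assuming `torch` and `transformers` are imported as in the hunk header. The prompt string and the generation arguments (`max_length`, `num_return_sequences`) are illustrative assumptions and not part of the commit.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline, set_seed

# Load the model in fp16 and place it automatically across available devices.
model = AutoModelForCausalLM.from_pretrained(
    "sbintuitions/sarashina1-13b", torch_dtype=torch.float16, device_map="auto"
)
tokenizer = AutoTokenizer.from_pretrained("sbintuitions/sarashina1-13b")
# Optional: the slow tokenizer variant added by this commit.
# tokenizer = AutoTokenizer.from_pretrained(
#     "sbintuitions/sarashina1-13b", use_fast=False, revision="slow-tokenizer"
# )

generator = pipeline("text-generation", model=model, tokenizer=tokenizer)
set_seed(123)

# Prompt and generation settings below are illustrative only.
outputs = generator("おはようございます、今日の天気は", max_length=30, num_return_sequences=1)
print(outputs[0]["generated_text"])
```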