rooa committed on
Commit
7e628ed
1 Parent(s): b2d761b

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +2 -2
README.md CHANGED
@@ -37,8 +37,8 @@ The models can be used as auto-regressive samplers as follows:
37
  import torch
38
  from transformers import AutoTokenizer, AutoModelForCausalLM
39
 
40
- tokenizer = AutoTokenizer.from_pretrained("Salesforce/xgen-7b-8k-base", trust_remote_code=True)
41
- model = AutoModelForCausalLM.from_pretrained("Salesforce/xgen-7b-8k-base", torch_dtype=torch.bfloat16)
42
  inputs = tokenizer("The world is", return_tensors="pt")
43
  sample = model.generate(**inputs, max_length=128)
44
  print(tokenizer.decode(sample[0]))
 
37
  import torch
38
  from transformers import AutoTokenizer, AutoModelForCausalLM
39
 
40
+ tokenizer = AutoTokenizer.from_pretrained("Salesforce/xgen-7b-8k-inst", trust_remote_code=True)
41
+ model = AutoModelForCausalLM.from_pretrained("Salesforce/xgen-7b-8k-inst", torch_dtype=torch.bfloat16)
42
  inputs = tokenizer("The world is", return_tensors="pt")
43
  sample = model.generate(**inputs, max_length=128)
44
  print(tokenizer.decode(sample[0]))