underspirit committed
Commit 18ab44d
Parent: 3afe4cd

Update README.md

Files changed (1): README.md (+1 −1)
README.md CHANGED
@@ -114,7 +114,7 @@ The XVERSE-13B model can be loaded for inference using the following code:
 >>> import torch
 >>> from transformers import AutoTokenizer, AutoModelForCausalLM
 >>> tokenizer = AutoTokenizer.from_pretrained("xverse/XVERSE-13B")
->>> model = AutoModelForCausalLM.from_pretrained("xverse/XVERSE-13B", trust_remote_code=True, torch_dtype=torch.float16, device_map='auto')
+>>> model = AutoModelForCausalLM.from_pretrained("xverse/XVERSE-13B", trust_remote_code=True, torch_dtype=torch.bfloat16, device_map='auto')
 >>> model = model.eval()
 >>> inputs = tokenizer('北京的景点:故宫、天坛、万里长城等。\n深圳的景点:', return_tensors='pt').input_ids
 >>> inputs = inputs.cuda()
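For reference, below is a minimal, self-contained sketch of the inference flow after this change. The load call mirrors the updated README line; the bfloat16-capability check, the generation arguments (max_new_tokens, do_sample), and the decode step are illustrative assumptions and not part of the diff. The prompt translates to "Beijing attractions: the Forbidden City, the Temple of Heaven, the Great Wall, etc.\nShenzhen attractions:".

```python
# Sketch of XVERSE-13B inference with the dtype introduced by this commit.
# Assumptions beyond the diff: the bf16 fallback, generate() arguments, and decode step.
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("xverse/XVERSE-13B")

# Fall back to float16 (the pre-commit dtype) on GPUs without bfloat16 support.
dtype = torch.bfloat16 if torch.cuda.is_bf16_supported() else torch.float16
model = AutoModelForCausalLM.from_pretrained(
    "xverse/XVERSE-13B",
    trust_remote_code=True,
    torch_dtype=dtype,
    device_map="auto",
)
model = model.eval()

# Prompt from the README: "Beijing attractions: ... \nShenzhen attractions:"
inputs = tokenizer('北京的景点:故宫、天坛、万里长城等。\n深圳的景点:', return_tensors='pt').input_ids
inputs = inputs.cuda()

# Greedy continuation of the prompt; arguments are illustrative.
generated = model.generate(inputs, max_new_tokens=64, do_sample=False)
print(tokenizer.decode(generated[0], skip_special_tokens=True))
```

As for the dtype switch itself: bfloat16 keeps the same exponent range as float32, which makes it less prone to overflow than float16 during large-model inference, while the fallback above preserves the previous behavior on hardware that lacks bfloat16 support.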