underspirit committed
Commit • 9beb409
1 Parent(s): 53d6bf7
Update README.md
README.md CHANGED
@@ -113,7 +113,7 @@ The XVERSE-13B model can be loaded for inference using the following code:
 ```python
 >>> from transformers import AutoTokenizer, AutoModelForCausalLM
 >>> tokenizer = AutoTokenizer.from_pretrained("xverse/XVERSE-13B")
->>> model = AutoModelForCausalLM.from_pretrained("xverse/XVERSE-13B", trust_remote_code=True)
+>>> model = AutoModelForCausalLM.from_pretrained("xverse/XVERSE-13B", trust_remote_code=True, torch_dtype=torch.float16, device_map='auto')
 >>> model = model.eval()
 >>> inputs = tokenizer('北京的景点:故宫、天坛、万里长城等。\n深圳的景点:', return_tensors='pt').input_ids
 >>> inputs = inputs.cuda()
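For reference, the updated loading step implies that `torch` is imported elsewhere in the README snippet, since it now passes `torch_dtype=torch.float16`. A minimal self-contained sketch of the revised example follows; the trailing generation and decode calls are illustrative assumptions, as the diff hunk ends at `inputs.cuda()`.

```python
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("xverse/XVERSE-13B")

# The commit changes this call to load fp16 weights and place them
# across available devices automatically.
model = AutoModelForCausalLM.from_pretrained(
    "xverse/XVERSE-13B",
    trust_remote_code=True,
    torch_dtype=torch.float16,
    device_map='auto',
)
model = model.eval()

inputs = tokenizer('北京的景点:故宫、天坛、万里长城等。\n深圳的景点:', return_tensors='pt').input_ids
inputs = inputs.cuda()

# Illustrative continuation (not part of the diff hunk): generate a
# completion for the prompt and print it.
generated_ids = model.generate(inputs, max_new_tokens=64, do_sample=False)
print(tokenizer.decode(generated_ids[0], skip_special_tokens=True))
```

Loading in float16 with `device_map='auto'` roughly halves the memory footprint compared with the previous full-precision load and lets Accelerate spread the 13B weights over the available GPUs.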