Update README.md
README.md (changed)
````diff
@@ -15,5 +15,9 @@ To use this model, you can just load it via `transformers` in fp16:
 import torch
 from transformers import AutoModelForCausalLM
 
-model = AutoModelForCausalLM.from_pretrained(
+model = AutoModelForCausalLM.from_pretrained(
+    "arnavgrg/llama-2-7b-nf4-fp16-upscaled",
+    device_map="auto",
+    torch_dtype=torch.float16,
+)
 ```
````
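For context, here is a minimal sketch of how the updated snippet could be used end to end. The model ID, `device_map`, and `torch_dtype` come from the diff above; the tokenizer and generation step are assumptions added for illustration, not part of this commit.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the fp16-upscaled NF4 checkpoint as shown in the updated README
model = AutoModelForCausalLM.from_pretrained(
    "arnavgrg/llama-2-7b-nf4-fp16-upscaled",
    device_map="auto",
    torch_dtype=torch.float16,
)

# Assumed usage: the tokenizer, prompt, and generation settings below
# are illustrative and not specified in the README diff
tokenizer = AutoTokenizer.from_pretrained("arnavgrg/llama-2-7b-nf4-fp16-upscaled")
inputs = tokenizer("What is machine learning?", return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=50)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```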