Update README.md
README.md CHANGED
@@ -83,7 +83,7 @@ import torch
 from transformers import AutoModelForCausalLM, AutoTokenizer
 
 model_path = "internlm/internlm2-chat-7b"
-model = AutoModelForCausalLM.from_pretrained(model_path, torch_dtype=torch.float16, trust_remote_code=True)
+model = AutoModelForCausalLM.from_pretrained(model_path, torch_dtype=torch.float16, trust_remote_code=True).cuda()
 tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
 
 model = model.eval()
@@ -152,7 +152,7 @@ import torch
 from transformers import AutoModelForCausalLM, AutoTokenizer
 
 model_path = "internlm/internlm2-chat-7b"
-model = AutoModelForCausalLM.from_pretrained(model_path, torch_dype=torch.float16, trust_remote_code=True)
+model = AutoModelForCausalLM.from_pretrained(model_path, torch_dype=torch.float16, trust_remote_code=True).cuda()
 tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
 
 model = model.eval()
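Both hunks make the same change in two quick-start examples: after loading the weights in float16, the model is now moved onto the GPU with .cuda() before inference. Below is a minimal end-to-end sketch of the updated snippet, assuming a CUDA device is available and using the chat helper that the InternLM2 checkpoint provides via trust_remote_code=True (its signature is taken from the surrounding README examples, so treat it as an assumption). The sketch also spells the keyword torch_dtype; the torch_dype spelling in the second hunk appears to be a typo.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_path = "internlm/internlm2-chat-7b"

# Load the weights in half precision and move them to the GPU, as in the updated line.
model = AutoModelForCausalLM.from_pretrained(
    model_path, torch_dtype=torch.float16, trust_remote_code=True
).cuda()
tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)

model = model.eval()

# Assumed usage of the remote-code chat helper: one turn with an empty history.
response, history = model.chat(tokenizer, "hello", history=[])
print(response)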