Update README.md

README.md (changed):
@@ -86,7 +86,7 @@ import torch
 from transformers import AutoModelForCausalLM, AutoTokenizer
 
 model_path = "internlm/internlm2-chat-20b-sft"
-model = AutoModelForCausalLM.from_pretrained(model_path, torch_dtype=torch.float16, trust_remote_code=True)
+model = AutoModelForCausalLM.from_pretrained(model_path, torch_dtype=torch.float16, trust_remote_code=True).cuda()
 tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
 
 model = model.eval()
@@ -159,7 +159,7 @@ import torch
 from transformers import AutoModelForCausalLM, AutoTokenizer
 
 model_path = "internlm/internlm2-chat-20b-sft"
-model = AutoModelForCausalLM.from_pretrained(model_path, torch_dype=torch.float16, trust_remote_code=True)
+model = AutoModelForCausalLM.from_pretrained(model_path, torch_dtype=torch.float16, trust_remote_code=True).cuda()
 tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
 
 model = model.eval()
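
For context, here is a minimal sketch of how the updated snippet might be exercised end-to-end. This is an illustration only, not part of the diff: the prompt text and the generation parameters (`max_new_tokens`, sampling settings) are assumptions, and only the standard `transformers` loading and `generate` APIs are used.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_path = "internlm/internlm2-chat-20b-sft"

# Load in half precision and move to GPU, matching the updated README snippet.
model = AutoModelForCausalLM.from_pretrained(
    model_path, torch_dtype=torch.float16, trust_remote_code=True
).cuda()
tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
model = model.eval()

# Assumed example prompt; any chat-style input works here.
prompt = "Hello! Please introduce yourself."
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)

# Standard transformers generation call; the settings below are illustrative.
with torch.no_grad():
    output = model.generate(**inputs, max_new_tokens=128, do_sample=True, temperature=0.8)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```

Without the `.cuda()` call added in this change, the model would stay on CPU after loading, so inputs placed on a GPU would not match the model's device.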