Update README.md
Browse files
README.md
CHANGED
@@ -84,7 +84,7 @@ import torch
|
|
84 |
from transformers import AutoModelForCausalLM, AutoTokenizer
|
85 |
|
86 |
model_path = "internlm/internlm2-chat-20b"
|
87 |
-
model = AutoModelForCausalLM.from_pretrained(model_path, torch_dtype=torch.float16, trust_remote_code=True)
|
88 |
tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
|
89 |
|
90 |
model = model.eval()
|
@@ -154,7 +154,7 @@ import torch
|
|
154 |
from transformers import AutoModelForCausalLM, AutoTokenizer
|
155 |
|
156 |
model_path = "internlm/internlm2-chat-20b"
|
157 |
-
model = AutoModelForCausalLM.from_pretrained(model_path, torch_dype=torch.float16, trust_remote_code=True)
|
158 |
tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
|
159 |
|
160 |
model = model.eval()
|
|
|
84 |
from transformers import AutoModelForCausalLM, AutoTokenizer
|
85 |
|
86 |
model_path = "internlm/internlm2-chat-20b"
|
87 |
+
model = AutoModelForCausalLM.from_pretrained(model_path, torch_dtype=torch.float16, trust_remote_code=True).cuda()
|
88 |
tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
|
89 |
|
90 |
model = model.eval()
|
|
|
154 |
from transformers import AutoModelForCausalLM, AutoTokenizer
|
155 |
|
156 |
model_path = "internlm/internlm2-chat-20b"
|
157 |
+
model = AutoModelForCausalLM.from_pretrained(model_path, torch_dtype=torch.float16, trust_remote_code=True).cuda()
|
158 |
tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
|
159 |
|
160 |
model = model.eval()
|