Update README.md
Browse files
README.md
CHANGED
@@ -54,7 +54,7 @@ import torch
|
|
54 |
tokenizer = AutoTokenizer.from_pretrained("Bo1015/proteinglm-100b-int4", trust_remote_code=True, use_fast=True)
|
55 |
config = AutoConfig.from_pretrained("Bo1015/proteinglm-100b-int4", trust_remote_code=True, torch_dtype=torch.half)
|
56 |
config.is_causal=False
|
57 |
-
config.post_layer_norm=True # use the final layernorm or not
|
58 |
model = AutoModelForMaskedLM.from_pretrained("Bo1015/proteinglm-100b-int4", config = config, torch_dtype=torch.half,trust_remote_code=True)
|
59 |
if torch.cuda.is_available():
|
60 |
model = model.cuda()
|
|
|
54 |
tokenizer = AutoTokenizer.from_pretrained("Bo1015/proteinglm-100b-int4", trust_remote_code=True, use_fast=True)
|
55 |
config = AutoConfig.from_pretrained("Bo1015/proteinglm-100b-int4", trust_remote_code=True, torch_dtype=torch.half)
|
56 |
config.is_causal=False
|
57 |
+
config.post_layer_norm=True # whether to apply the final layernorm; for some tasks, setting this to False may yield better results
|
58 |
model = AutoModelForMaskedLM.from_pretrained("Bo1015/proteinglm-100b-int4", config = config, torch_dtype=torch.half,trust_remote_code=True)
|
59 |
if torch.cuda.is_available():
|
60 |
model = model.cuda()
|