Bo1015 committed
Commit 39fdf5d · verified · 1 Parent(s): d4bc9ba

Update README.md

Files changed (1): README.md (+1, -1)
README.md CHANGED

@@ -54,7 +54,7 @@ import torch
  tokenizer = AutoTokenizer.from_pretrained("Bo1015/proteinglm-100b-int4", trust_remote_code=True, use_fast=True)
  config = AutoConfig.from_pretrained("Bo1015/proteinglm-100b-int4", trust_remote_code=True, torch_dtype=torch.half)
  config.is_causal=False
- config.post_layer_norm=True # use the final layernorm or not
+ config.post_layer_norm=True # use the final layer norm or not; for some tasks, setting it to False may work better
  model = AutoModelForMaskedLM.from_pretrained("Bo1015/proteinglm-100b-int4", config=config, torch_dtype=torch.half, trust_remote_code=True)
  if torch.cuda.is_available():
      model = model.cuda()
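
The updated comment notes that disabling the final layer norm can work better for some tasks. Below is a minimal sketch of that alternative configuration, reusing the same loading code from the README; whether `post_layer_norm=False` actually helps is task-dependent and should be validated on your own downstream task.

```python
import torch
from transformers import AutoConfig, AutoModelForMaskedLM, AutoTokenizer

# Same loading path as the README, but with the final layer norm disabled,
# as suggested for some downstream tasks in the updated comment.
tokenizer = AutoTokenizer.from_pretrained("Bo1015/proteinglm-100b-int4", trust_remote_code=True, use_fast=True)
config = AutoConfig.from_pretrained("Bo1015/proteinglm-100b-int4", trust_remote_code=True, torch_dtype=torch.half)
config.is_causal = False
config.post_layer_norm = False  # skip the final layer norm; compare against True on your task
model = AutoModelForMaskedLM.from_pretrained("Bo1015/proteinglm-100b-int4", config=config, torch_dtype=torch.half, trust_remote_code=True)

if torch.cuda.is_available():
    model = model.cuda()
```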