from transformers import AutoModel

# Load the CodeGeeX2-6B checkpoint from the local release directory and move it to the GPU
model = AutoModel.from_pretrained("/mnt/vepfs/qinkai/release/codegeex2-6b/", trust_remote_code=True).cuda()
# Re-save the weights into the current directory, split into shards of at most 2000 MB each
model.save_pretrained("./", max_shard_size="2000MB")