from transformers import AutoTokenizer
from vocab import TokenizerType
tokenizer = AutoTokenizer.from_pretrained("baichuan-inc/Baichuan2-7B-Chat", trust_remote_code=True)
# byte-level BPE tokenizer built on SentencePiece
tokenizer.type = TokenizerType.ByteBPE
tokenizer.comments = "expands the vocabulary size from 64000 in Baichuan1 to 125696"
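
# A minimal usage sketch, not part of the original file: it assumes the
# remotely loaded tokenizer exposes the standard transformers API
# (encode, convert_ids_to_tokens, __len__); the sample string is arbitrary.
if __name__ == "__main__":
    sample = "Baichuan2 tokenizer demo"
    ids = tokenizer.encode(sample)
    print(len(tokenizer))                        # vocabulary size, expected 125696
    print(ids)                                   # token ids for the sample string
    print(tokenizer.convert_ids_to_tokens(ids))  # the corresponding surface tokens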