import os

from transformers import BertTokenizerFast

# Resolve the tokenizer directory relative to this file, not the
# current working directory, so the script runs from anywhere.
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
TOKENIZER_DIR = os.path.join(CURRENT_DIR, "tokenizer")

# Load the fast (Rust-backed) BERT tokenizer from the local directory.
tokenizer = BertTokenizerFast.from_pretrained(TOKENIZER_DIR)

# Note: tokenizer.vocab_size counts only the base vocabulary, while
# len(tokenizer.get_vocab()) also includes tokens added after training.
# vocab_size = len(tokenizer.get_vocab())
# vocab_size = tokenizer.vocab_size
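
# A minimal usage sketch of the loaded tokenizer. The sample sentence
# below is an illustrative assumption, not part of the original file.
if __name__ == "__main__":
    encoded = tokenizer("Hello, world!")
    print(encoded["input_ids"])  # token ids, including [CLS]/[SEP] specials
    print(tokenizer.convert_ids_to_tokens(encoded["input_ids"]))  # ids back to tokens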