import os

from transformers import BertTokenizerFast

# Resolve the tokenizer directory relative to this script's location.
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
TOKENIZER_DIR = os.path.join(CURRENT_DIR, "tokenizer")

# Load the locally saved fast BERT tokenizer.
tokenizer = BertTokenizerFast.from_pretrained(TOKENIZER_DIR)

# Vocabulary size: get_vocab() includes any added tokens; vocab_size is the base vocab only.
# vocab_size = len(tokenizer.get_vocab())
# vocab_size = tokenizer.vocab_size
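
# Minimal usage sketch (assumption: the "tokenizer" directory above holds a saved
# BertTokenizerFast; the sample sentence below is illustrative only, not from the
# original script).
if __name__ == "__main__":
    encoding = tokenizer("Hello, world!")
    print(encoding["input_ids"])       # token ids produced for the sample sentence
    print(len(tokenizer.get_vocab()))  # vocabulary size, including added tokens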