"""Load the project's tokenizer from the ``tokenizer/`` directory next to this file."""
import os

from transformers import AutoTokenizer, BloomTokenizerFast  # NOTE(review): BloomTokenizerFast looks unused here — confirm before removing

# Resolve the tokenizer directory relative to this file so the script works
# regardless of the current working directory.
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
TOKENIZER_DIR = os.path.join(CURRENT_DIR, "tokenizer")

# SECURITY: trust_remote_code=True will import and execute Python code shipped
# inside TOKENIZER_DIR. Only keep this flag if the tokenizer directory comes
# from a trusted source.
tokenizer = AutoTokenizer.from_pretrained(TOKENIZER_DIR, trust_remote_code=True)