# Provenance (Hugging Face file page metadata, captured with the file):
#   uploaded by: xu-song
#   commit: 9495a4f ("update")
#   size: 298 Bytes
"""Expose the Falcon tokenizer loaded from the local ``tokenizer/`` directory."""
import os

from transformers import AutoTokenizer

# Absolute path of the directory containing this file; the tokenizer assets
# are expected in a ``tokenizer`` subdirectory next to it.
CURRENT_DIR = os.path.abspath(os.path.dirname(__file__))
TOKENIZER_DIR = os.path.join(CURRENT_DIR, "tokenizer")

# Alternative: pull straight from the Hub (needs an access token):
# tokenizer = AutoTokenizer.from_pretrained("tiiuae/falcon-180b")
tokenizer = AutoTokenizer.from_pretrained(TOKENIZER_DIR)