import os
from transformers import AutoTokenizer


# Resolve the path to the bundled tokenizer files, relative to this script.
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
TOKENIZER_DIR = os.path.join(CURRENT_DIR, "tokenizer")


# Loading directly from the Hub requires access to the gated Falcon-180B repo
# (a Hugging Face access token); use the locally saved tokenizer copy instead.
# tokenizer = AutoTokenizer.from_pretrained("tiiuae/falcon-180b")
tokenizer = AutoTokenizer.from_pretrained(TOKENIZER_DIR)
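

# Minimal usage sketch (not part of the original script): encode a sample
# string with the loaded tokenizer and round-trip it back to text. The sample
# text below is illustrative only and assumes the "tokenizer" directory
# contains valid tokenizer files.
if __name__ == "__main__":
    sample = "Hello, Falcon!"
    ids = tokenizer(sample)["input_ids"]  # list of token IDs for the sample
    print(ids)
    print(tokenizer.decode(ids))  # decode the IDs back into a string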