# Tokenizer loader (extracted from a Hugging Face Space; page-chrome residue removed)
import os

from transformers import LlamaTokenizer

# Resolve the tokenizer directory relative to this file so loading works
# regardless of the process's current working directory.
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
TOKENIZER_DIR = os.path.join(CURRENT_DIR, "tokenizer")

# Load the LLaMA tokenizer shipped alongside this file in ./tokenizer.
tokenizer = LlamaTokenizer.from_pretrained(TOKENIZER_DIR)
# Runtime string kept verbatim: it warns (in Chinese) that this vocabulary has
# one extra `[PAD]` token compared to chinese_llama — do not mix the two.
tokenizer.comments = "比chinese_llama词典多一个`[PAD]`,请勿混用"