# File size: 300 Bytes
# 751936e
import os

from transformers import LlamaTokenizer

# Directory containing this file; tokenizer assets are expected in ./tokenizer
# next to it, so the script works regardless of the current working directory.
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
TOKENIZER_DIR = os.path.join(CURRENT_DIR, "tokenizer")

# Load the LLaMA tokenizer from the local directory (no network download).
tokenizer = LlamaTokenizer.from_pretrained(TOKENIZER_DIR)

# Attach a provenance note as an ad-hoc attribute. The (Chinese) text says:
# "has one more `[PAD]` token than the chinese_llama vocabulary; do not mix
# the two tokenizers". Keep the string as-is — downstream code may read it.
tokenizer.comments = "比chinese_llama词典多一个`[PAD]`,请勿混用"