"""Patch a saved tokenizer so its mask token strips preceding whitespace.

Loads the tokenizer from the current directory, replaces the mask token
with an ``AddedToken`` that has ``lstrip=True`` (so ``"<mask>"`` absorbs
the space before it during tokenization), and writes the updated files
back in place.
"""
from transformers import AutoTokenizer, AddedToken

tokenizer = AutoTokenizer.from_pretrained("./")
# lstrip=True makes the tokenizer treat " <mask>" the same as "<mask>",
# which is the convention RoBERTa-style models expect for the mask token.
tokenizer.mask_token = AddedToken("<mask>", lstrip=True)
tokenizer.save_pretrained("./")
# NOTE(review): the original file repeated this load/set/save sequence a
# second time verbatim; the second pass re-reads the already-patched files
# and re-saves an identical result, so the duplicate was removed.