"""Demo: effect of registering a string as a special token on GPT-2 tokenization.

Encodes the same string before and after ``add_tokens(..., special_tokens=True)``
and prints both results, so the difference in token ids can be compared.
"""

from mindformers import AutoTokenizer

# The string used throughout the demo; registered as a special token below.
DEMO_TOKEN = "XXXXXXXX"


def main() -> None:
    """Load a GPT-2 tokenizer and show encoding before/after adding a special token."""
    tokenizer = AutoTokenizer.from_pretrained("gpt2")

    # Before registration: presumably encoded as several subword pieces — compare output below.
    print(tokenizer(DEMO_TOKEN))

    # Register the string as a special token so it is treated as one unit.
    # NOTE(review): add_tokens returns the number of tokens added; ignored here.
    tokenizer.add_tokens(DEMO_TOKEN, special_tokens=True)

    # After registration: the same input, encoded with the token registered.
    print(tokenizer(DEMO_TOKEN))


if __name__ == "__main__":
    main()