from transformers import AutoTokenizer, AutoModelForMaskedLM

# Load the tokenizer that ships with the Chinese BERT checkpoint.
tokenizer = AutoTokenizer.from_pretrained("bert-base-chinese")

# bert-base-chinese tokenizes Chinese text character by character,
# so "明天搜索" splits into ['明', '天', '搜', '索'].
tokens = tokenizer.tokenize("明天搜索")
print(tokens)
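
# The file imports AutoModelForMaskedLM but does not use it yet. A minimal
# sketch of a possible continuation (not part of the original file): load the
# masked-LM head and predict a [MASK]ed character. The input string
# "明天[MASK]索" is an illustrative example derived from the text above.
import torch

model = AutoModelForMaskedLM.from_pretrained("bert-base-chinese")

inputs = tokenizer("明天[MASK]索", return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits

# Locate the [MASK] position and take the most likely token at that position.
mask_index = (inputs["input_ids"][0] == tokenizer.mask_token_id).nonzero(as_tuple=True)[0]
predicted_id = logits[0, mask_index].argmax(dim=-1)
print(tokenizer.decode(predicted_id))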