"""
"""
from transformers import BertTokenizer
tokenizer = BertTokenizer.from_pretrained('clue/roberta_chinese_clue_tiny')
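
# Illustrative usage sketch (not part of the original file): tokenize a sample
# Chinese sentence and inspect the WordPiece tokens and encoded input IDs.
# The sample text below is a hypothetical example.
sample = "今天天气不错"
tokens = tokenizer.tokenize(sample)   # list of WordPiece tokens
ids = tokenizer.encode(sample)        # token IDs with [CLS]/[SEP] added
print(tokens)
print(ids)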