{ "do_lower_case": false, "bos_token": "", "eos_token": "", "unk_token": "", "sep_token": "", "pad_token": "", "cls_token": "", "mask_token": "", "split_by_punct": false, "sp_model_kwargs": {}, "special_tokens_map_file": null, "name_or_path": "/cognitive_comp/gaoxinyu/pretrained_model/deberta-base-sp", "tokenizer_class": "DebertaV2Tokenizer" }