Andy Janco committed on
Commit
2204b60
1 Parent(s): 818a9c1

add tokenizer

Browse files
Files changed (4) hide show
  1. merges.txt +0 -0
  2. tokenizer.json +0 -0
  3. tokenizer_config.json +1 -1
  4. vocab.json +0 -0
merges.txt CHANGED
The diff for this file is too large to render. See raw diff
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
tokenizer_config.json CHANGED
@@ -1 +1 @@
1
- {"unk_token": "<unk>", "bos_token": "<s>", "eos_token": "</s>", "add_prefix_space": false, "errors": "replace", "sep_token": "</s>", "cls_token": "<s>", "pad_token": "<pad>", "mask_token": "<mask>", "trim_offsets": true, "special_tokens_map_file": null, "name_or_path": "yi-roberta-base-pretrained", "tokenizer_class": "RobertaTokenizer"}
1
+ {"unk_token": "<unk>", "bos_token": "<s>", "eos_token": "</s>", "add_prefix_space": false, "errors": "replace", "sep_token": "</s>", "cls_token": "<s>", "pad_token": "<pad>", "mask_token": "<mask>", "trim_offsets": true, "special_tokens_map_file": null, "name_or_path": "sr-roberta-base-pretrained", "tokenizer_class": "RobertaTokenizer"}
vocab.json CHANGED
The diff for this file is too large to render. See raw diff