osanseviero and pcuenq committed
Commit 37477cd
1 Parent(s): e016f29

Fix tokenizer config (#2)


- Fix tokenizer config (e3482c9f68c6e80f809eda88c4b8ed225044f962)


Co-authored-by: Pedro Cuenca <pcuenq@users.noreply.huggingface.co>

Files changed (1)
  1. tokenizer_config.json +14 -1
tokenizer_config.json CHANGED
@@ -1 +1,14 @@
-{"do_lower_case": true, "do_basic_tokenize": true, "never_split": null, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "model_max_length": 512, "special_tokens_map_file": null, "tokenizer_file": "/home/osanseviero/.cache/huggingface/transformers/75abb59d7a06f4f640158a9bfcde005264e59e8d566781ab1415b139d2e4c603.7f2721073f19841be16f41b0a70b600ca6b880c8f3df6f3535cbc704371bdfa4", "name_or_path": "distilbert-base-uncased", "tokenizer_class": "DistilBertTokenizer"}
+{
+  "do_lower_case": true,
+  "unk_token": "[UNK]",
+  "sep_token": "[SEP]",
+  "pad_token": "[PAD]",
+  "cls_token": "[CLS]",
+  "mask_token": "[MASK]",
+  "tokenize_chinese_chars": true,
+  "strip_accents": null,
+  "model_max_length": 512,
+  "special_tokens_map_file": null,
+  "name_or_path": "distilbert-base-uncased",
+  "tokenizer_class": "DistilBertTokenizer"
+}
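
For context, a minimal sketch of how the cleaned-up tokenizer_config.json is consumed. The repo id below is a placeholder (the actual model repo name is not shown in this diff); the calls are the standard transformers AutoTokenizer API, and loading no longer depends on the stale local "tokenizer_file" cache path removed in this commit.

# Minimal sketch; "user/model" is a hypothetical placeholder repo id.
# AutoTokenizer reads "tokenizer_class": "DistilBertTokenizer" from
# tokenizer_config.json and instantiates the matching tokenizer.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("user/model")

print(tokenizer.model_max_length)                # 512, from "model_max_length"
print(tokenizer.cls_token, tokenizer.sep_token)  # [CLS] [SEP], from the config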