Upload tokenizer_config.json with huggingface_hub
tokenizer_config.json +4 -0
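As context for the commit title, an upload like this one is typically performed with huggingface_hub's HfApi.upload_file. Below is a minimal sketch, assuming a placeholder repo id and a write token already configured (for example via huggingface-cli login); the actual repository and call behind this commit are not shown on the page.

from huggingface_hub import HfApi

# Minimal sketch: push a local tokenizer_config.json to a model repo on the Hub.
# "your-username/your-model" is a placeholder, not the repository behind this commit.
api = HfApi()
api.upload_file(
    path_or_fileobj="tokenizer_config.json",   # local file to upload
    path_in_repo="tokenizer_config.json",      # destination path inside the repo
    repo_id="your-username/your-model",        # hypothetical repo id
    repo_type="model",
    commit_message="Upload tokenizer_config.json with huggingface_hub",
)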
tokenizer_config.json CHANGED
@@ -56,11 +56,15 @@
   "cls_token": "[CLS]",
   "do_lower_case": true,
   "mask_token": "[MASK]",
+  "max_length": 128,
   "model_max_length": 512,
   "pad_token": "[PAD]",
   "sep_token": "[SEP]",
+  "stride": 0,
   "strip_accents": null,
   "tokenize_chinese_chars": true,
   "tokenizer_class": "BertTokenizer",
+  "truncation_side": "right",
+  "truncation_strategy": "longest_first",
   "unk_token": "[UNK]"
 }
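The four added keys (max_length, stride, truncation_side, truncation_strategy) are truncation-related settings that Transformers tokenizers read from tokenizer_config.json. A minimal sketch of using the same values explicitly at call time after the tokenizer is reloaded follows; the repo id is a placeholder, and since whether these config values apply automatically can depend on the transformers version, they are repeated as keyword arguments for clarity.

from transformers import AutoTokenizer

# Sketch: reload the tokenizer and truncate a sentence pair with the same settings
# that this commit adds to tokenizer_config.json. The repo id is a placeholder.
tokenizer = AutoTokenizer.from_pretrained("your-username/your-model")

encoded = tokenizer(
    "First sentence.",
    "A second, much longer sentence that may need to be cut down.",
    truncation="longest_first",  # mirrors "truncation_strategy"
    max_length=128,              # mirrors "max_length"
    stride=0,                    # mirrors "stride"
)
print(len(encoded["input_ids"]))  # at most 128 token ids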