Upload tokenizer
Browse files
tokenizer_config.json — CHANGED (+4 additions, 0 deletions)
@@ -3005,8 +3005,12 @@
   "clean_up_tokenization_spaces": true,
   "eos_token": "</s>",
   "mask_token": "<mask>",
+  "max_length": 1024,
   "model_max_length": 1000000000000000019884624838656,
   "pad_token": "<pad>",
+  "stride": 0,
   "tokenizer_class": "PreTrainedTokenizerFast",
+  "truncation_side": "right",
+  "truncation_strategy": "longest_first",
   "unk_token": "<unk>"
 }