Upload tokenizer
- tokenizer.json +0 -0
- tokenizer_config.json +1 -1
- vocab.txt +0 -0
tokenizer.json
CHANGED
The diff for this file is too large to render. See raw diff.
tokenizer_config.json
CHANGED
@@ -43,7 +43,7 @@
   },
   "clean_up_tokenization_spaces": true,
   "cls_token": "[CLS]",
-  "do_lower_case":
+  "do_lower_case": false,
   "mask_token": "[MASK]",
   "model_max_length": 512,
   "pad_token": "[PAD]",
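For context, a minimal sketch of what the flag changed by this commit controls at load time, assuming the tokenizer is consumed through transformers' AutoTokenizer; the repo id below is a hypothetical placeholder, not a name taken from this commit.

from transformers import AutoTokenizer

# Hypothetical repo id standing in for whichever checkpoint ships this
# tokenizer_config.json.
tokenizer = AutoTokenizer.from_pretrained("org/cased-bert-model")

# With "do_lower_case": false, input casing is preserved, so cased vocab
# entries are matched as-is instead of being folded to lowercase first.
# (For a fast tokenizer, the normalizer inside tokenizer.json governs
# casing; the tokenizer.json updated in this same commit presumably
# matches this flag.)
print(tokenizer.tokenize("Paris in Winter"))
# With lowercasing off, cased pieces such as 'Paris' can survive intact
# rather than being reduced to 'paris'.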
vocab.txt
CHANGED
The diff for this file is too large to render. See raw diff.