Upload tokenizer
tokenizer.json CHANGED (+1 -0)
@@ -336,6 +336,7 @@
     "end_of_word_suffix": "",
     "fuse_unk": false,
     "byte_fallback": false,
+    "ignore_merges": false,
     "vocab": {
       "!": 0,
       "\"": 1,
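The added key sits in the BPE "model" section of tokenizer.json, alongside fuse_unk and byte_fallback; ignore_merges controls whether BPE merge rules are skipped for inputs that already appear verbatim in the vocabulary, so false keeps the previous behaviour of always applying merges. A minimal Python sketch, assuming the updated file is saved locally as tokenizer.json and a recent release of the tokenizers library that recognises this field:

import json

from tokenizers import Tokenizer

# Confirm the flag added by this commit is present in the BPE model section.
with open("tokenizer.json", "r", encoding="utf-8") as f:
    config = json.load(f)
print(config["model"]["ignore_merges"])  # -> False

# Loading and encoding work as before; with ignore_merges set to false, merge
# rules are applied even to strings that already exist as whole vocab entries.
tokenizer = Tokenizer.from_file("tokenizer.json")
print(tokenizer.encode("Hello, world!").tokens)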