d0rj committed
Commit 8937e32
1 Parent(s): ab1ea8f

fix: tokenizer fix

sentencepiece.bpe.model CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:cfc8146abe2a0488e9e2a0c56de7952f7c11ab059eca145a0a727afce0db2865
-size 5069051
+oid sha256:34e9d938fbab77d2bb4acd48953c66bb7ad2d0b675b4c0911e6ed25caf20acd6
+size 1270564
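
This swaps the Git LFS pointer for the sentencepiece model: a new blob roughly a quarter of the old file's size (1,270,564 bytes vs. 5,069,051). As a sanity check, here is a minimal sketch (assuming a local checkout where git-lfs has already pulled the real file, not just the pointer) that verifies the blob against the new pointer:

```python
import hashlib
import os

# Path within a local checkout of this repo (assumes LFS content is pulled).
path = "sentencepiece.bpe.model"

# Expected values copied from the new LFS pointer in this commit.
expected_oid = "34e9d938fbab77d2bb4acd48953c66bb7ad2d0b675b4c0911e6ed25caf20acd6"
expected_size = 1270564

with open(path, "rb") as f:
    digest = hashlib.sha256(f.read()).hexdigest()

assert os.path.getsize(path) == expected_size, "size does not match LFS pointer"
assert digest == expected_oid, "sha256 does not match LFS pointer"
print("sentencepiece.bpe.model matches the new LFS pointer")
```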
special_tokens_map.json CHANGED
@@ -2,7 +2,13 @@
   "bos_token": "<s>",
   "cls_token": "<s>",
   "eos_token": "</s>",
-  "mask_token": "<mask>",
+  "mask_token": {
+    "content": "<mask>",
+    "lstrip": true,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
   "pad_token": "<pad>",
   "sep_token": "</s>",
   "unk_token": "<unk>"
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
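
`tokenizer.json` is the serialized fast (Rust-backed) tokenizer. With it committed, `AutoTokenizer` can load the fast implementation directly instead of converting the sentencepiece model at load time. A sketch, with a placeholder repo id since the diff does not name the repository:

```python
from transformers import AutoTokenizer

# Placeholder repo id; the diff itself does not name the repository.
repo_id = "user/model"

# tokenizer.json lets this resolve to a fast tokenizer without conversion.
tokenizer = AutoTokenizer.from_pretrained(repo_id, use_fast=True)
print(tokenizer.is_fast)  # expected: True
```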
 
tokenizer_config.json CHANGED
@@ -11,7 +11,7 @@
     "rstrip": false,
     "single_word": false
   },
-  "model_max_length": 512,
+  "model_max_length": 1000000000000000019884624838656,
   "pad_token": "<pad>",
   "sep_token": "</s>",
   "sp_model_kwargs": {},