Maxime62 committed on
Commit
d0cde80
·
verified ·
1 Parent(s): bde51fe

Upload tokenizer

Browse files
Files changed (2) hide show
  1. special_tokens_map.json +1 -1
  2. tokenizer_config.json +3 -3
special_tokens_map.json CHANGED
@@ -13,7 +13,7 @@
13
  "rstrip": false,
14
  "single_word": false
15
  },
16
- "pad_token": "</s>",
17
  "unk_token": {
18
  "content": "<unk>",
19
  "lstrip": false,
 
13
  "rstrip": false,
14
  "single_word": false
15
  },
16
+ "pad_token": "<s>",
17
  "unk_token": {
18
  "content": "<unk>",
19
  "lstrip": false,
tokenizer_config.json CHANGED
@@ -1,6 +1,6 @@
1
  {
2
  "add_bos_token": true,
3
- "add_eos_token": true,
4
  "added_tokens_decoder": {
5
  "0": {
6
  "content": "<unk>",
@@ -40,8 +40,8 @@
40
  "eos_token": "</s>",
41
  "legacy": false,
42
  "model_max_length": 1000000000000000019884624838656,
43
- "pad_token": "</s>",
44
- "padding_side": "left",
45
  "sp_model_kwargs": {},
46
  "spaces_between_special_tokens": false,
47
  "tokenizer_class": "LlamaTokenizer",
 
1
  {
2
  "add_bos_token": true,
3
+ "add_eos_token": false,
4
  "added_tokens_decoder": {
5
  "0": {
6
  "content": "<unk>",
 
40
  "eos_token": "</s>",
41
  "legacy": false,
42
  "model_max_length": 1000000000000000019884624838656,
43
+ "pad_token": "<s>",
44
+ "padding_side": "right",
45
  "sp_model_kwargs": {},
46
  "spaces_between_special_tokens": false,
47
  "tokenizer_class": "LlamaTokenizer",