AL-Sayed committed
Commit c93cb37
1 Parent(s): 992d7e1

Upload tokenizer

special_tokens_map.json CHANGED
@@ -13,7 +13,6 @@
     "rstrip": false,
     "single_word": false
   },
- "pad_token": "</s>",
   "unk_token": {
     "content": "<unk>",
     "lstrip": false,
tokenizer.json CHANGED
@@ -134,7 +134,6 @@
   "end_of_word_suffix": null,
   "fuse_unk": true,
   "byte_fallback": true,
- "ignore_merges": false,
   "vocab": {
     "<unk>": 0,
     "<s>": 1,
tokenizer_config.json CHANGED
@@ -28,11 +28,10 @@
     }
   },
   "bos_token": "<s>",
- "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
   "clean_up_tokenization_spaces": false,
   "eos_token": "</s>",
- "model_max_length": 1024,
- "pad_token": "</s>",
+ "model_max_length": 1000000000000000019884624838656,
+ "pad_token": null,
   "sp_model_kwargs": {},
   "tokenizer_class": "LlamaTokenizer",
   "unk_token": "<unk>",