rishiraj committed on
Commit
cf9ecf6
1 Parent(s): 06db535

Upload tokenizer

Browse files
Files changed (2) hide show
  1. special_tokens_map.json +0 -1
  2. tokenizer_config.json +2 -3
special_tokens_map.json CHANGED
@@ -18,7 +18,6 @@
18
  "rstrip": false,
19
  "single_word": false
20
  },
21
- "pad_token": "</s>",
22
  "unk_token": {
23
  "content": "<unk>",
24
  "lstrip": false,
 
18
  "rstrip": false,
19
  "single_word": false
20
  },
 
21
  "unk_token": {
22
  "content": "<unk>",
23
  "lstrip": false,
tokenizer_config.json CHANGED
@@ -33,12 +33,11 @@
33
  "</s>"
34
  ],
35
  "bos_token": "<s>",
36
- "chat_template": "{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '<|user|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'system' %}\n{{ '<|system|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'assistant' %}\n{{ '<|assistant|>\n' + message['content'] + eos_token }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '<|assistant|>' }}\n{% endif %}\n{% endfor %}",
37
  "clean_up_tokenization_spaces": false,
38
  "eos_token": "</s>",
39
  "legacy": true,
40
- "model_max_length": 2048,
41
- "pad_token": "</s>",
42
  "sp_model_kwargs": {},
43
  "spaces_between_special_tokens": false,
44
  "tokenizer_class": "LlamaTokenizer",
 
33
  "</s>"
34
  ],
35
  "bos_token": "<s>",
 
36
  "clean_up_tokenization_spaces": false,
37
  "eos_token": "</s>",
38
  "legacy": true,
39
+ "model_max_length": 1000000000000000019884624838656,
40
+ "pad_token": null,
41
  "sp_model_kwargs": {},
42
  "spaces_between_special_tokens": false,
43
  "tokenizer_class": "LlamaTokenizer",