shikib committed
Commit 8cffcfe
1 Parent(s): 06fc6e3

Fix tokenizer chat template

Files changed (2):
  1. special_tokens_map.json +1 -1
  2. tokenizer_config.json +10 -3
special_tokens_map.json CHANGED

@@ -14,7 +14,7 @@
     "single_word": false
   },
   "pad_token": {
-    "content": "</s>",
+    "content": "<s>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
tokenizer_config.json CHANGED

@@ -29,15 +29,22 @@
   },
   "additional_special_tokens": [],
   "bos_token": "<s>",
-  "chat_template": "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + message['content'] + ' [/INST]' }}{% elif message['role'] == 'assistant' %}{{ message['content'] + eos_token}}{% else %}{{ raise_exception('Only user and assistant roles are supported!') }}{% endif %}{% endfor %}",
+  "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% for message in messages %}{% if message['role'] == 'user' %}{{ bos_token + '\n<|user|>\n' + message['content'] + '\n' }}{% elif message['role'] == 'assistant' %}{{ '<|assistant|>\n' + message['content'] + eos_token }}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '<|assistant|>\n' }}{% endif %}",
   "clean_up_tokenization_spaces": false,
   "eos_token": "</s>",
   "legacy": true,
+  "max_length": 2048,
   "model_max_length": 1000000000000000019884624838656,
-  "pad_token": "</s>",
+  "pad_to_multiple_of": null,
+  "pad_token": "<s>",
+  "pad_token_type_id": 0,
+  "padding_side": "right",
   "sp_model_kwargs": {},
   "spaces_between_special_tokens": false,
+  "stride": 0,
   "tokenizer_class": "LlamaTokenizer",
+  "truncation_side": "right",
+  "truncation_strategy": "longest_first",
   "unk_token": "<unk>",
-  "use_default_system_prompt": false
+  "use_default_system_prompt": true
 }
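
The new chat_template replaces the Mistral-style [INST] ... [/INST] format with a <|user|>/<|assistant|> turn format and honors add_generation_prompt. A minimal sketch of rendering it via transformers' apply_chat_template; the repo id is again a placeholder:

from transformers import AutoTokenizer

# Placeholder repo id; substitute the actual model repository.
tok = AutoTokenizer.from_pretrained("org/model")

messages = [
    {"role": "user", "content": "Hello!"},
    {"role": "assistant", "content": "Hi there."},
]

# Render as a string (no tokenization) to inspect the prompt format.
text = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(text)

Per the template above, the rendered prompt should look like:

<s>
<|user|>
Hello!
<|assistant|>
Hi there.</s><|assistant|>

Note that setting add_generation_prompt=True appends the trailing "<|assistant|>\n" so the model continues with a new assistant turn.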