killawhale2
committed on
Commit
•
d460cf0
1
Parent(s):
6d1672c
fix chat template (#2)
Browse files- fix: change chat template and add pad token id (46c4cf95947194c75dbd551e533ca141c8b8ee07)
- config.json +1 -0
- generation_config.json +1 -1
- tokenizer_config.json +2 -2
config.json
CHANGED
@@ -6,6 +6,7 @@
|
|
6 |
"attention_bias": false,
|
7 |
"bos_token_id": 1,
|
8 |
"eos_token_id": 2,
|
|
|
9 |
"hidden_act": "silu",
|
10 |
"hidden_size": 4096,
|
11 |
"initializer_range": 0.02,
|
|
|
6 |
"attention_bias": false,
|
7 |
"bos_token_id": 1,
|
8 |
"eos_token_id": 2,
|
9 |
+
"pad_token_id": 2,
|
10 |
"hidden_act": "silu",
|
11 |
"hidden_size": 4096,
|
12 |
"initializer_range": 0.02,
|
generation_config.json
CHANGED
@@ -2,7 +2,7 @@
|
|
2 |
"_from_model_config": true,
|
3 |
"bos_token_id": 1,
|
4 |
"eos_token_id": 2,
|
|
|
5 |
"transformers_version": "4.35.2",
|
6 |
"use_cache": false
|
7 |
}
|
8 |
-
|
|
|
2 |
"_from_model_config": true,
|
3 |
"bos_token_id": 1,
|
4 |
"eos_token_id": 2,
|
5 |
+
"pad_token_id": 2,
|
6 |
"transformers_version": "4.35.2",
|
7 |
"use_cache": false
|
8 |
}
|
|
tokenizer_config.json
CHANGED
@@ -28,13 +28,13 @@
|
|
28 |
}
|
29 |
},
|
30 |
"additional_special_tokens": [],
|
31 |
-
"chat_template": "{%
|
32 |
"bos_token": "<s>",
|
33 |
"clean_up_tokenization_spaces": false,
|
34 |
"eos_token": "</s>",
|
35 |
"legacy": true,
|
36 |
"model_max_length": 1000000000000000019884624838656,
|
37 |
-
"pad_token":
|
38 |
"sp_model_kwargs": {},
|
39 |
"spaces_between_special_tokens": false,
|
40 |
"tokenizer_class": "LlamaTokenizer",
|
|
|
28 |
}
|
29 |
},
|
30 |
"additional_special_tokens": [],
|
31 |
+
"chat_template": "{% for message in messages %}{% if message['role'] == 'system' %}{% if message['content']%}{{'### System:\n' + message['content']+'\n\n'}}{% endif %}{% elif message['role'] == 'user' %}{{'### User:\n' + message['content']+'\n\n'}}{% elif message['role'] == 'assistant' %}{{'### Assistant:\n' + message['content']}}{% endif %}{% if loop.last and add_generation_prompt %}{{ '### Assistant:\n' }}{% endif %}{% endfor %}",
|
32 |
"bos_token": "<s>",
|
33 |
"clean_up_tokenization_spaces": false,
|
34 |
"eos_token": "</s>",
|
35 |
"legacy": true,
|
36 |
"model_max_length": 1000000000000000019884624838656,
|
37 |
+
"pad_token": "</s>",
|
38 |
"sp_model_kwargs": {},
|
39 |
"spaces_between_special_tokens": false,
|
40 |
"tokenizer_class": "LlamaTokenizer",
|