TheBloke committed on
Commit
b01b9af
1 Parent(s): 1c16e35

AWQ model commit

Browse files
Files changed (3) hide show
  1. config.json +1 -1
  2. generation_config.json +4 -2
  3. tokenizer_config.json +1 -1
config.json CHANGED
@@ -4,7 +4,7 @@
4
  "MistralForCausalLM"
5
  ],
6
  "bos_token_id": 1,
7
- "eos_token_id": 2,
8
  "hidden_act": "silu",
9
  "hidden_size": 4096,
10
  "initializer_range": 0.02,
 
4
  "MistralForCausalLM"
5
  ],
6
  "bos_token_id": 1,
7
+ "eos_token_id": 32000,
8
  "hidden_act": "silu",
9
  "hidden_size": 4096,
10
  "initializer_range": 0.02,
generation_config.json CHANGED
@@ -1,7 +1,9 @@
1
  {
2
  "_from_model_config": true,
3
  "bos_token_id": 1,
4
- "eos_token_id": 2,
 
5
  "pad_token_id": 0,
 
6
  "transformers_version": "4.35.2"
7
- }
 
1
  {
2
  "_from_model_config": true,
3
  "bos_token_id": 1,
4
+ "eos_token_id": 32000,
5
+ "max_length": 8192,
6
  "pad_token_id": 0,
7
+ "temperature": 0.5,
8
  "transformers_version": "4.35.2"
9
+ }
tokenizer_config.json CHANGED
@@ -50,7 +50,7 @@
50
  "bos_token": "<s>",
51
  "chat_template": "{{ bos_token }}{% for message in messages %}{{ 'GPT4 Correct ' + message['role'].title() + ': ' + message['content'] + '<|end_of_turn|>'}}{% endfor %}{% if add_generation_prompt %}{{ 'GPT4 Correct Assistant:' }}{% endif %}",
52
  "clean_up_tokenization_spaces": false,
53
- "eos_token": "</s>",
54
  "legacy": true,
55
  "model_max_length": 1000000000000000019884624838656,
56
  "pad_token": null,
 
50
  "bos_token": "<s>",
51
  "chat_template": "{{ bos_token }}{% for message in messages %}{{ 'GPT4 Correct ' + message['role'].title() + ': ' + message['content'] + '<|end_of_turn|>'}}{% endfor %}{% if add_generation_prompt %}{{ 'GPT4 Correct Assistant:' }}{% endif %}",
52
  "clean_up_tokenization_spaces": false,
53
+ "eos_token": "<|end_of_turn|>",
54
  "legacy": true,
55
  "model_max_length": 1000000000000000019884624838656,
56
  "pad_token": null,