Text Generation · Transformers · Safetensors · mistral · openchat · C-RLFT · conversational · text-generation-inference · 4-bit precision
TheBloke committed
Commit 2220dc3
1 Parent(s): 665db0b

GPTQ model commit

config.json CHANGED
@@ -4,7 +4,7 @@
     "MistralForCausalLM"
   ],
   "bos_token_id": 1,
-  "eos_token_id": 2,
+  "eos_token_id": 32000,
   "hidden_act": "silu",
   "hidden_size": 4096,
   "initializer_range": 0.02,
generation_config.json ADDED
@@ -0,0 +1,9 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 1,
+  "eos_token_id": 32000,
+  "max_length": 8192,
+  "pad_token_id": 0,
+  "temperature": 0.5,
+  "transformers_version": "4.35.2"
+}
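The new generation_config.json supplies the defaults that model.generate() falls back to when no overrides are passed: the <|end_of_turn|> EOS id, an 8192-token max_length, and temperature 0.5. A minimal sketch of reading those defaults with transformers' GenerationConfig, again with a placeholder repo id:

```python
from transformers import GenerationConfig

# Placeholder repo id; the values printed below come from the file added in this commit.
gen_config = GenerationConfig.from_pretrained("TheBloke/openchat_3.5-GPTQ")

print(gen_config.eos_token_id)  # 32000
print(gen_config.max_length)    # 8192
print(gen_config.temperature)   # 0.5

# model.generate(...) picks these defaults up automatically unless they are overridden per call.
```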
model.safetensors CHANGED
@@ -1,3 +1,3 @@
   version https://git-lfs.github.com/spec/v1
-  oid sha256:7330943661dc208ebeeee950676118c84ab68c49d87b5cc2c9791f2aadee227e
+  oid sha256:6afa1dbc4c454102d78c1f36a7a047fb4918b718e7c2d14e10aa325fdfec6bd9
   size 4158695048
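The Git LFS pointer now references the re-uploaded 4-bit GPTQ weights (same size, new SHA-256). A hedged sketch of loading a GPTQ checkpoint like this one through transformers, assuming the optimum / auto-gptq integration is installed and using a placeholder repo id:

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

# Placeholder repo id. Loading GPTQ safetensors through transformers relies on the
# optimum / auto-gptq integration; device_map="auto" places the 4-bit weights on the GPU.
repo_id = "TheBloke/openchat_3.5-GPTQ"

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(repo_id, device_map="auto")

# With the updated configs above, generation stops when <|end_of_turn|> (id 32000) is emitted.
prompt = "GPT4 Correct User: Hello!<|end_of_turn|>GPT4 Correct Assistant:"
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
output = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```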
tokenizer_config.json CHANGED
@@ -50,7 +50,7 @@
   "bos_token": "<s>",
   "chat_template": "{{ bos_token }}{% for message in messages %}{{ 'GPT4 Correct ' + message['role'].title() + ': ' + message['content'] + '<|end_of_turn|>'}}{% endfor %}{% if add_generation_prompt %}{{ 'GPT4 Correct Assistant:' }}{% endif %}",
   "clean_up_tokenization_spaces": false,
-  "eos_token": "</s>",
+  "eos_token": "<|end_of_turn|>",
   "legacy": true,
   "model_max_length": 1000000000000000019884624838656,
   "pad_token": null,