danieldk-explosion committed
Commit 17a51f9
Parent: c152f4e

float16 version of float32 model

Files changed (3):
  1. config.json +2 -0
  2. generation_config.json +1 -0
  3. model.safetensors +1 -1
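
The change set is a float16 re-export of the float32 checkpoint at explosion-testing/llama2-kv-sharing (the "_name_or_path" added in config.json below). The exact conversion script is not part of the commit; the following is a minimal sketch of how such a copy could be produced with transformers, with the output directory name being illustrative only.

    import torch
    from transformers import AutoModelForCausalLM, AutoTokenizer

    # Source checkpoint named by the "_name_or_path" field added in config.json.
    source = "explosion-testing/llama2-kv-sharing"
    # Hypothetical output directory, not taken from the commit.
    target = "llama2-kv-sharing-fp16"

    # Load the float32 weights, casting them to float16 on load.
    model = AutoModelForCausalLM.from_pretrained(source, torch_dtype=torch.float16)
    tokenizer = AutoTokenizer.from_pretrained(source)

    # Saving writes model.safetensors (now float16) together with
    # config.json and generation_config.json.
    model.save_pretrained(target)
    tokenizer.save_pretrained(target)
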
config.json CHANGED
@@ -1,4 +1,5 @@
 {
+  "_name_or_path": "explosion-testing/llama2-kv-sharing",
   "architectures": [
     "LlamaForCausalLM"
   ],
@@ -15,6 +16,7 @@
   "num_attention_heads": 4,
   "num_hidden_layers": 5,
   "num_key_value_heads": 1,
+  "pad_token_id": 0,
   "pretraining_tp": 1,
   "rms_norm_eps": 1e-06,
   "rope_scaling": null,
generation_config.json CHANGED
@@ -2,5 +2,6 @@
   "_from_model_config": true,
   "bos_token_id": 1,
   "eos_token_id": 2,
+  "pad_token_id": 0,
   "transformers_version": "4.38.2"
 }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:bf46a45d56fb6d5374936aefdb278d7415ade7d9993cda382fda50cff373be9b
+oid sha256:3b0a2dd6970157ebd6d3eef57be42b1721175e3da4ebad826597f64423b73cb4
 size 6629856
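
To confirm that the updated weights file really holds half-precision tensors, the checkpoint can be inspected directly with the safetensors library. A small sketch, assuming model.safetensors has been downloaded locally:

    from safetensors import safe_open

    # Print the dtype of every tensor; a float16 export should report
    # torch.float16 throughout.
    with safe_open("model.safetensors", framework="pt", device="cpu") as f:
        for name in f.keys():
            print(name, f.get_tensor(name).dtype)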