at676 committed
Commit b5de5f0 · verified · 1 Parent(s): da93a52

3266ecd9655d1adbed198bf1bc7f49fcc5d92d48cfd344c57e1e96394ce2176e

config.json CHANGED
@@ -4,6 +4,7 @@
     "LlamaForCausalLM"
   ],
   "attention_bias": false,
+  "attention_dropout": 0.0,
   "bos_token_id": 1,
   "eos_token_id": 2,
   "hidden_act": "silu",
@@ -26,14 +27,15 @@
     "model_version": 1,
     "outlier_channel_split": false,
     "packsz": 4,
-    "rescale_WH": false
+    "rescale_WH": false,
+    "resid_scale_override": -1
   },
   "rms_norm_eps": 1e-05,
   "rope_scaling": null,
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "float16",
-  "transformers_version": "4.34.0",
+  "transformers_version": "4.36.2",
   "use_cache": true,
   "vocab_size": 32000
 }
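
For reference, the new top-level key lands in the standard Hugging Face model config and can be inspected after loading. The sketch below is not part of this commit: it assumes transformers >= 4.36.2, uses a placeholder repository id, and assumes the quantization-specific keys (packsz, rescale_WH, resid_scale_override) sit in a nested dict that the config object simply carries through as a plain attribute.

# Minimal sketch (not part of this commit): checking the field added by the
# config.json change, assuming transformers >= 4.36.2 is installed.
from transformers import AutoConfig

repo_id = "your-org/your-quip-llama-checkpoint"  # placeholder, not the real repo id

config = AutoConfig.from_pretrained(repo_id)
print(config.attention_dropout)  # 0.0 -- key added in this commit
# Quantization-specific keys such as "rescale_WH" and "resid_scale_override"
# are assumed to live in a nested sub-dict of the config; the exact attribute
# name depends on this checkpoint's config layout.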
generation_config.json CHANGED
@@ -6,5 +6,5 @@
   "pad_token_id": 0,
   "temperature": 0.6,
   "top_p": 0.9,
-  "transformers_version": "4.34.0"
+  "transformers_version": "4.36.2"
 }
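
The sampling defaults themselves are unchanged; only the version stamp moves. A small sketch, again with a placeholder repository id, of reading those defaults:

# Minimal sketch (not part of this commit): the sampling defaults recorded in
# generation_config.json, loaded with transformers >= 4.36.2.
from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained("your-org/your-quip-llama-checkpoint")  # placeholder
print(gen_config.temperature, gen_config.top_p)  # 0.6 0.9, as in the file above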
model.safetensors.index.json CHANGED
The diff for this file is too large to render. See raw diff