jon-tow committed
Commit b20ef7f
1 Parent(s): 7991e5d

Update config.json

Files changed (1)
  1. config.json +2 -4
config.json CHANGED
@@ -9,15 +9,13 @@
   "model_type": "stablelm",
   "norm_eps": 1e-05,
   "num_attention_heads": 32,
-  "num_heads": 32,
   "num_hidden_layers": 32,
   "num_key_value_heads": 32,
-  "rope_pct": 0.25,
+  "partial_rotary_factor": 0.25,
   "rope_theta": 10000,
-  "rotary_scaling_factor": 1.0,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.36.2",
+  "transformers_version": "4.38.0.dev0",
   "use_cache": true,
   "use_qkv_bias": false,
   "vocab_size": 50304