Tom Aarsen committed
Commit cb6a582
1 Parent(s): 55af33d

Remove now unused parameters
config.json CHANGED (+0 -2)
@@ -14,7 +14,6 @@
   "decoder_bias": true,
   "deterministic_flash_attn": false,
   "embedding_dropout": 0.0,
-  "embedding_norm": true,
   "eos_token_id": 50282,
   "global_attn_every_n_layers": 3,
   "global_rope_theta": 160000.0,
@@ -38,7 +37,6 @@
   "pad_token_id": 50283,
   "position_embedding_type": "absolute",
   "sep_token_id": 50282,
-  "skip_first_prenorm": true,
   "tie_word_embeddings": true,
   "torch_dtype": "float32",
   "transformers_version": "4.47.0.dev0",
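For anyone mirroring this change, a minimal sketch to confirm the two dropped parameters are gone from a local checkout (the config.json path is an assumption):

import json

# Load the updated config from the repository checkout
# (assumed to live in the current directory).
with open("config.json") as f:
    config = json.load(f)

# The two parameters removed in this commit should no longer be present.
for removed_key in ("embedding_norm", "skip_first_prenorm"):
    assert removed_key not in config, f"unexpected key: {removed_key}"

print("config.json no longer contains the removed parameters")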