Upload config.json with huggingface_hub
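The commit title indicates the file was pushed with the huggingface_hub client. A minimal sketch of such an upload is shown below for context; the repo id is a placeholder and is not recorded anywhere in this commit, and authentication is assumed to come from a local `huggingface-cli login` token:

from huggingface_hub import upload_file

# Push a local config.json to the root of the model repo.
# repo_id is hypothetical; this commit does not name the repository.
upload_file(
    path_or_fileobj="config.json",
    path_in_repo="config.json",
    repo_id="your-org/your-model",
    commit_message="Upload config.json with huggingface_hub",
)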
config.json CHANGED (+2 -3)
@@ -26,7 +26,6 @@
       "lm_head"
     ],
     "export": {
-      "custom_mode": "fp8",
       "kv_cache_group": [],
       "pack_method": "reorder",
       "weight_format": "real_quantized",
@@ -73,14 +72,14 @@
     "layer_type_quant_config": {},
     "pack_method": "reorder",
     "quant_method": "quark",
-    "quant_mode":
+    "quant_mode": "eager_mode"
   },
   "rms_norm_eps": 1e-06,
   "rope_scaling": null,
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "float16",
-  "transformers_version": "4.
+  "transformers_version": "4.45.2",
   "use_cache": true,
   "vocab_size": 32000
 }
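Net effect of the commit: "custom_mode" is dropped from the "export" block, "quant_mode" is set to "eager_mode", and the recorded transformers version becomes 4.45.2 (the removed values of "quant_mode" and "transformers_version" are truncated in the diff view above). A quick way to sanity-check the uploaded file is to parse it and inspect the touched keys; the sketch below assumes the quark settings live under a top-level "quantization_config" key, which matches the usual transformers config layout but is not itself visible in this diff:

import json

# Load the uploaded config and check the fields this commit touched.
with open("config.json") as f:
    cfg = json.load(f)

quant = cfg["quantization_config"]  # assumed key; see note above
assert quant["quant_method"] == "quark"
assert quant["quant_mode"] == "eager_mode"
assert "custom_mode" not in quant["export"]
assert cfg["transformers_version"] == "4.45.2"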