Upload config.json with huggingface_hub
config.json (+2 -3)
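The commit message indicates the file was pushed with the huggingface_hub client. A minimal sketch of the equivalent call follows; the repo_id is a placeholder, since the target repository is not shown on this page, and authentication is assumed to be configured already.

    from huggingface_hub import HfApi

    api = HfApi()  # assumes a token is set via HF_TOKEN or `huggingface-cli login`
    api.upload_file(
        path_or_fileobj="config.json",   # local file to push
        path_in_repo="config.json",      # destination path in the repo
        repo_id="org/model",             # placeholder; actual repo not shown here
        commit_message="Upload config.json with huggingface_hub",
    )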
@@ -26,7 +26,6 @@
       "lm_head"
     ],
     "export": {
-      "custom_mode": "fp8",
       "kv_cache_group": [],
       "pack_method": "reorder",
       "weight_format": "real_quantized",
@@ -63,14 +62,14 @@
     "layer_type_quant_config": {},
     "pack_method": "reorder",
     "quant_method": "quark",
-    "quant_mode":
+    "quant_mode": "eager_mode"
   },
   "rms_norm_eps": 1e-06,
   "rope_scaling": null,
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "float16",
-  "transformers_version": "4.
+  "transformers_version": "4.45.2",
   "use_cache": true,
   "vocab_size": 32000
 }
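After an edit like this, the touched fields can be sanity-checked locally before re-uploading. A minimal sketch using the standard json module; the local file path is an assumption, and the check is limited to keys the diff shows at the top level of the config (the parent key of the quantization block is not visible in the hunks, so "quant_mode" is omitted).

    import json

    # Sanity-check the fields touched by this commit before re-uploading.
    with open("config.json") as f:   # local path is an assumption
        cfg = json.load(f)

    assert cfg["transformers_version"] == "4.45.2"
    assert cfg["torch_dtype"] == "float16"
    assert cfg["vocab_size"] == 32000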