upload

- config.json +2 -2
- generation_config.json +4 -1
config.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "architectures": [
-    "
+    "DeepseekV3Model"
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
@@ -24,7 +24,7 @@
   "moe_intermediate_size": 2048,
   "moe_layer_freq": 1,
   "n_group": 8,
-  "n_routed_experts":
+  "n_routed_experts": 256,
   "n_shared_experts": 1,
   "norm_topk_prob": true,
   "num_attention_heads": 128,
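The fields touched by this commit can be checked against a local copy of the file. A minimal sketch, assuming the repo has been downloaded to the working directory and using only the standard json module (no model-specific loader); the file path is illustrative:

import json

# Load the updated config.json from a local copy of the repository
# (path is illustrative; adjust to wherever the file was downloaded).
with open("config.json") as f:
    config = json.load(f)

# Fields changed in this commit.
assert config["architectures"] == ["DeepseekV3Model"]
assert config["n_routed_experts"] == 256

# A few of the surrounding MoE/attention fields visible in the diff context.
print(config["n_group"], config["n_shared_experts"], config["num_attention_heads"])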
generation_config.json
CHANGED

@@ -2,5 +2,8 @@
   "_from_model_config": true,
   "bos_token_id": 0,
   "eos_token_id": 1,
-  "
+  "do_sample": true,
+  "temperature": 0.6,
+  "top_p": 0.95,
+  "transformers_version": "4.46.3"
 }
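With these defaults, downstream callers get temperature plus top-p (nucleus) sampling out of the box instead of greedy decoding. A minimal sketch of how the new values surface through transformers' GenerationConfig; the repo id below is a placeholder, not taken from this commit:

from transformers import GenerationConfig

# Placeholder repo id; substitute the actual model repository.
gen_config = GenerationConfig.from_pretrained("org/model-repo")

# Values added in this commit.
print(gen_config.do_sample)    # True
print(gen_config.temperature)  # 0.6
print(gen_config.top_p)        # 0.95

# Passing generation_config=gen_config to model.generate(...) applies
# these sampling settings by default.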