Commit 345be52
Parent(s): e3b488d

Delete mlora-train.json

mlora-train.json  DELETED  (+0 -39)
@@ -1,39 +0,0 @@
-{
-    "cutoff_len": 1024,
-    "save_step": 2000,
-    "early_stop_test_step": 2000,
-    "train_lora_candidate_num": 1,
-    "train_lora_simultaneously_num": 1,
-    "train_strategy": "optim",
-    "lora": [
-        {
-            "name": "alpaca-mixlora-7b",
-            "optim": "adamw",
-            "lr": 3e-4,
-            "batch_size": 16,
-            "micro_batch_size": 4,
-            "test_batch_size": 64,
-            "num_epochs": 2,
-            "r": 8,
-            "lora_alpha": 16,
-            "lora_dropout": 0.05,
-            "target_modules": {
-                "q_proj": false,
-                "k_proj": false,
-                "v_proj": false,
-                "o_proj": false,
-                "w1_proj": true,
-                "w2_proj": true,
-                "w3_proj": true
-            },
-            "routing_strategy": "mixtral",
-            "num_experts": 8,
-            "top_k": 3,
-            "act_fn": "silu",
-            "data": "yahma/alpaca-cleaned",
-            "prompt": "template/alpaca.json",
-            "group_by_length": false,
-            "expand_side": "right"
-        }
-    ]
-}
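For context, the deleted file configured one MixLoRA adapter ("alpaca-mixlora-7b") with mixtral-style routing over 8 experts (top_k 3), targeting only the MLP projections (w1/w2/w3). Below is a minimal sketch of how a config in this shape could be inspected with the Python standard library; the loader is illustrative only and is not mLoRA's own config-loading code.

# Hypothetical sketch: inspect a config shaped like the deleted
# mlora-train.json using only the Python standard library.
# Field names come from the file above; everything else is assumed.
import json

with open("mlora-train.json") as f:
    config = json.load(f)

print("cutoff_len:", config["cutoff_len"])

# Each entry under "lora" describes one adapter to train.
for adapter in config["lora"]:
    # Collect only the modules flagged true in "target_modules".
    enabled = [m for m, on in adapter["target_modules"].items() if on]
    print(adapter["name"],
          "routing:", adapter["routing_strategy"],
          "experts:", adapter["num_experts"],
          "top_k:", adapter["top_k"],
          "targets:", enabled)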