{
    "bias": "none",
    "peft_type": "MIXLORA",
    "r": 16,
    "lora_alpha": 32,
    "lora_dropout": 0.05,
    "target_modules": [
        "q_proj",
        "k_proj",
        "v_proj",
        "o_proj",
        "w1_proj",
        "w2_proj",
        "w3_proj"
    ],
    "routing_strategy": "mixtral",
    "num_experts": 8,
    "act_fn": "silu",
    "top_k": 2,
    "base_model_name_or_path": "/host_data/Llama-2-7b-hf",
    "task_type": "CAUSAL_LM"
}
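
For reference, a minimal sketch of how this config could be inspected programmatically, assuming it is saved as adapter_config.json. The module shapes, the layer count, and the "per-expert LoRA on FFN projections, shared LoRA on attention projections" split are illustrative assumptions based on standard Llama-2-7B dimensions, not taken from the MixLoRA source; the parameter count is therefore only a rough estimate.

import json

# Llama-2-7B dimensions (assumption: standard HF config values).
HIDDEN = 4096         # hidden_size
INTERMEDIATE = 11008  # intermediate_size
NUM_LAYERS = 32       # decoder layers in Llama-2-7B

# (d_in, d_out) of each adapted projection (assumed from Llama-2-7B shapes).
SHAPES = {
    "q_proj": (HIDDEN, HIDDEN),
    "k_proj": (HIDDEN, HIDDEN),
    "v_proj": (HIDDEN, HIDDEN),
    "o_proj": (HIDDEN, HIDDEN),
    "w1_proj": (HIDDEN, INTERMEDIATE),  # gate projection
    "w2_proj": (INTERMEDIATE, HIDDEN),  # down projection
    "w3_proj": (HIDDEN, INTERMEDIATE),  # up projection
}
FFN_MODULES = {"w1_proj", "w2_proj", "w3_proj"}

with open("adapter_config.json") as f:
    cfg = json.load(f)

r = cfg["r"]
experts = cfg["num_experts"]

total = 0
for name in cfg["target_modules"]:
    d_in, d_out = SHAPES[name]
    lora_params = r * (d_in + d_out)  # A: d_in x r, B: r x d_out
    # Assumption: one LoRA pair per expert on FFN modules, one shared pair on attention.
    copies = experts if name in FFN_MODULES else 1
    total += lora_params * copies * NUM_LAYERS

print(f"rank={r}, experts={experts}, top_k={cfg['top_k']}")
print(f"approx. trainable LoRA parameters: {total / 1e6:.1f}M")

Under these assumptions the config adds roughly 202M trainable parameters on top of the frozen 7B base model, with only top_k=2 of the 8 FFN experts active per token at inference time.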