{
    "lora_alpha": 16,
    "lora_dropout": 0.05,
    "r": 8,
    "peft_type": "LORA",
    "task_type": "CAUSAL_LM",
    "bias": "none",
    "target_modules": [
        "q_proj",
        "v_proj"
    ]
}
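
For reference, this adapter_config.json maps directly onto a peft.LoraConfig. Below is a minimal sketch of building the same configuration in Python and attaching it to a causal language model; the base model checkpoint and output directory are placeholders, not taken from this repository.

from peft import LoraConfig, TaskType, get_peft_model
from transformers import AutoModelForCausalLM

# Hypothetical base checkpoint; substitute the model this adapter was trained on.
base_model = AutoModelForCausalLM.from_pretrained("base-model-name")

# Mirrors the fields in adapter_config.json above.
lora_config = LoraConfig(
    r=8,                                  # LoRA rank
    lora_alpha=16,                        # scaling factor (alpha / r gives the effective scale)
    lora_dropout=0.05,                    # dropout applied inside the LoRA layers
    bias="none",                          # bias terms are left frozen
    task_type=TaskType.CAUSAL_LM,
    target_modules=["q_proj", "v_proj"],  # attention query/value projections
)

model = get_peft_model(base_model, lora_config)
model.print_trainable_parameters()  # only the low-rank adapter matrices are trainable

# Saving the adapter writes an adapter_config.json equivalent to the one above.
model.save_pretrained("lora-adapter")

Loading the adapter back onto the same base model works with PeftModel.from_pretrained(base_model, "lora-adapter").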