{
  "dpo_beta": 0.1,
  "finetuning_type": "lora",
  "lora_alpha": 32.0,
  "lora_dropout": 0.1,
  "lora_rank": 8,
  "lora_target": [
    "q_proj",
    "v_proj"
  ],
  "name_module_trainable": "mlp",
  "num_hidden_layers": 32,
  "num_layer_trainable": 3,
  "ppo_score_norm": false,
  "resume_lora_training": true
}
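
A minimal sketch of how the LoRA-related fields in this config could be consumed with Hugging Face PEFT. The file name `finetuning_args.json` is an assumption for illustration; the mapping of `lora_rank`/`lora_alpha`/`lora_dropout`/`lora_target` onto `LoraConfig` is standard PEFT usage, while the remaining keys (`dpo_beta`, `ppo_score_norm`, layer-trainability settings) are training-loop options consumed elsewhere.

```python
import json

from peft import LoraConfig

# Load the hyperparameters from the JSON file above.
# "finetuning_args.json" is a hypothetical file name for this sketch.
with open("finetuning_args.json") as f:
    args = json.load(f)

# Map the LoRA fields onto a PEFT LoraConfig. Keys such as dpo_beta and
# ppo_score_norm configure the RLHF/DPO training loop, not the adapter,
# so they are not passed here.
lora_config = LoraConfig(
    r=args["lora_rank"],                 # rank of the low-rank update (8)
    lora_alpha=args["lora_alpha"],       # scaling factor (32.0)
    lora_dropout=args["lora_dropout"],   # dropout on the adapter path (0.1)
    target_modules=args["lora_target"],  # ["q_proj", "v_proj"]
    task_type="CAUSAL_LM",
)
```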