{
  "base_model_name_or_path": "t5-large",
  "bias": "none",
  "fan_in_fan_out": false,
  "inference_mode": true,
  "init_lora_weights": true,
  "lora_alpha": 32,
  "lora_dropout": 0.1,
  "modules_to_save": null,
  "peft_type": "LORA",
  "r": 8,
  "target_modules": [
    "q",
    "v"
  ],
  "task_type": "SEQ_2_SEQ_LM"
}
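
This is the adapter_config.json that the PEFT library writes next to a LoRA adapter's weights: a rank-8 LoRA (alpha 32, dropout 0.1) applied to the "q" and "v" attention projections of t5-large, saved for inference. Below is a minimal sketch of loading such an adapter with the peft and transformers libraries; the directory name "./t5-large-lora" is a hypothetical path standing in for wherever this config and the adapter weights actually live.

# A minimal sketch, assuming "./t5-large-lora" (hypothetical path)
# contains this adapter_config.json and the adapter weights.
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer
from peft import PeftConfig, PeftModel

adapter_dir = "./t5-large-lora"  # hypothetical; substitute your adapter's path

# Read adapter_config.json to discover the base model ("t5-large").
config = PeftConfig.from_pretrained(adapter_dir)

# Load the frozen base model, then attach the LoRA adapter
# (r=8, lora_alpha=32, targeting the "q" and "v" projections).
base_model = AutoModelForSeq2SeqLM.from_pretrained(config.base_model_name_or_path)
model = PeftModel.from_pretrained(base_model, adapter_dir)
model.eval()  # matches "inference_mode": true in the config

tokenizer = AutoTokenizer.from_pretrained(config.base_model_name_or_path)
inputs = tokenizer("translate English to German: Hello, world!", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))

For training, the same settings would be expressed as peft.LoraConfig(r=8, lora_alpha=32, target_modules=["q", "v"], lora_dropout=0.1, bias="none", task_type="SEQ_2_SEQ_LM"); saving that config with save_pretrained produces a file of this shape.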