{ "adapter_labels": [ "EU", "Indian", "ECHR", "UKC", "CAC" ], "alpha_pattern": {}, "auto_mapping": null, "base_model_name_or_path": "gpt2-xl", "bias": "none", "exclude_modules": null, "fan_in_fan_out": true, "inference_mode": true, "init_lora_weights": true, "layer_group": 3, "layer_replication": null, "layers_pattern": "h", "layers_to_transform": [ 46, 47 ], "loftq_config": {}, "lora_alpha": 26, "lora_dropout": 0.1, "megatron_config": null, "megatron_core": "megatron.core", "modules_to_save": null, "num_adapters_per_layer": 5, "peft_type": "LORA", "r": 13, "r_a": [ 14, 9, 7, 31, 3 ], "rank_pattern": {}, "revision": null, "target_modules": [ "c_proj", "c_attn" ], "task_type": "CAUSAL_LM", "use_dora": false, "use_rslora": false }