```json
{
  "alpha_pattern": {},
  "auto_mapping": null,
  "base_model_name_or_path": "meta-llama/Llama-3.2-11B-Vision-Instruct",
  "bias": "none",
  "eva_config": null,
  "exclude_modules": null,
  "fan_in_fan_out": null,
  "inference_mode": true,
  "init_lora_weights": true,
  "layer_replication": null,
  "layers_pattern": null,
  "layers_to_transform": null,
  "loftq_config": {},
  "lora_alpha": 256,
  "lora_bias": false,
  "lora_dropout": 0.05,
  "megatron_config": null,
  "megatron_core": "megatron.core",
  "modules_to_save": null,
  "peft_type": "LORA",
  "r": 512,
  "rank_pattern": {},
  "revision": null,
  "target_modules": "language_model.model.layers.[\\d]+.(mlp|cross_attn|self_attn).(up|down|gate|q|k|v|o)_proj",
  "task_type": "CAUSAL_LM",
  "use_dora": false,
  "use_rslora": true
}
```
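Two details are worth noting. With `"use_rslora": true`, PEFT scales each adapter update by `lora_alpha / sqrt(r)` (here 256 / √512 ≈ 11.3) rather than the standard `lora_alpha / r`. And because `target_modules` is a plain string rather than a list, PEFT treats it as a regex that is full-matched against module names, so only the language-model layers' projection matrices are adapted and the vision tower is left untouched.

For reference, here is a minimal sketch of how an equivalent config could be built programmatically. It assumes a `peft` release recent enough to support `use_rslora`, passes only the salient fields from the JSON above (the rest match `LoraConfig` defaults), and uses illustrative variable names:

```python
# Minimal sketch: rebuild the adapter configuration above with peft.
# Assumes a peft release that supports use_rslora; fields left out
# here keep their LoraConfig defaults, matching the JSON dump.
from peft import LoraConfig

lora_config = LoraConfig(
    task_type="CAUSAL_LM",
    r=512,
    lora_alpha=256,      # with use_rslora=True, scaling = 256 / sqrt(512) ≈ 11.3
    lora_dropout=0.05,
    bias="none",
    use_rslora=True,
    # A string is full-matched as a regex against module names, so this
    # targets the MLP, self-attention, and cross-attention projections of
    # every language-model layer while leaving the vision tower frozen.
    target_modules=r"language_model.model.layers.[\d]+.(mlp|cross_attn|self_attn).(up|down|gate|q|k|v|o)_proj",
)
```

Such a config would typically be attached to the base model with `get_peft_model(base_model, lora_config)` for training, or the saved adapter loaded directly with `PeftModel.from_pretrained`.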