{
  "alpha_pattern": {},
  "auto_mapping": {
    "base_model_class": "LlamaForCausalLM",
    "parent_library": "transformers.models.llama.modeling_llama"
  },
  "base_model_name_or_path": "meta-llama/Meta-Llama-3-8B-Instruct",
  "bias": "none",
  "fan_in_fan_out": false,
  "inference_mode": true,
  "init_lora_weights": true,
  "layer_replication": null,
  "layers_pattern": null,
  "layers_to_transform": null,
  "loftq_config": {},
  "lora_alpha": 512,
  "lora_dropout": 0.1,
  "megatron_config": null,
  "megatron_core": "megatron.core",
  "modules_to_save": [
    "ln_f"
  ],
  "peft_type": "LORA",
  "r": 256,
  "rank_pattern": {},
  "revision": null,
  "target_modules": [
    "v_proj",
    "o_proj",
    "k_proj",
    "q_proj"
  ],
  "task_type": null,
  "use_dora": false,
  "use_rslora": false
}
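
As a minimal sketch of how this adapter configuration could be reproduced in code, the snippet below builds an equivalent `LoraConfig` with the PEFT library and attaches it to the base model named in the config. It assumes the `peft` and `transformers` packages are installed and that you have access to the gated `meta-llama/Meta-Llama-3-8B-Instruct` checkpoint; fields that PEFT fills in automatically (such as `auto_mapping` and `peft_type`) are omitted.

```python
# Sketch: recreating the adapter configuration above with PEFT.
# Values are copied from the JSON config; model access is assumed.
from peft import LoraConfig, get_peft_model
from transformers import AutoModelForCausalLM

lora_config = LoraConfig(
    r=256,                      # LoRA rank
    lora_alpha=512,             # scaling factor (alpha / r = 2)
    lora_dropout=0.1,
    bias="none",
    target_modules=["q_proj", "k_proj", "v_proj", "o_proj"],
    modules_to_save=["ln_f"],   # extra module trained and saved in full
    init_lora_weights=True,
    use_rslora=False,
    use_dora=False,
)

base_model = AutoModelForCausalLM.from_pretrained(
    "meta-llama/Meta-Llama-3-8B-Instruct"
)
model = get_peft_model(base_model, lora_config)
model.print_trainable_parameters()
```

Calling `model.save_pretrained(...)` on the resulting PEFT model would write out an `adapter_config.json` similar to the one shown above, alongside the adapter weights.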