{
  "alpha_pattern": {},
  "auto_mapping": {
    "base_model_class": "GPTNeoXForCausalLM",
    "parent_library": "transformers.models.gpt_neox.modeling_gpt_neox"
  },
  "base_model_name_or_path": "EleutherAI/pythia-1b",
  "bias": "none",
  "fan_in_fan_out": false,
  "inference_mode": true,
  "init_lora_weights": true,
  "layers_pattern": null,
  "layers_to_transform": null,
  "loftq_config": {},
  "lora_alpha": 8,
  "lora_dropout": 0.0,
  "megatron_config": null,
  "megatron_core": "megatron.core",
  "modules_to_save": null,
  "peft_type": "LORA",
  "r": 8,
  "rank_pattern": {},
  "revision": null,
  "target_modules": [
    "dense_h_to_4h",
    "query_key_value",
    "dense_4h_to_h"
  ],
  "task_type": null
}
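
A config of this shape is what the peft library writes out when a LoRA adapter is saved. The following is a minimal sketch, not the original training script, assuming the peft and transformers packages and a hypothetical output directory "pythia-1b-lora", of how an equivalent adapter_config.json could be produced:

from transformers import AutoModelForCausalLM
from peft import LoraConfig, get_peft_model

# Load the base model named in base_model_name_or_path.
base = AutoModelForCausalLM.from_pretrained("EleutherAI/pythia-1b")

# Mirror the fields in the config above; anything not set here keeps
# its peft default (e.g. init_lora_weights=True, task_type=None).
config = LoraConfig(
    r=8,
    lora_alpha=8,
    lora_dropout=0.0,
    bias="none",
    target_modules=["dense_h_to_4h", "query_key_value", "dense_4h_to_h"],
)

model = get_peft_model(base, config)

# save_pretrained writes adapter_config.json alongside the adapter weights;
# fields such as base_model_name_or_path and inference_mode are filled in
# by peft at save time.
model.save_pretrained("pythia-1b-lora")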