{ "base_model_name_or_path": "gpt2", "bias": "none", "enable_lora": [ true, false, true ], "fan_in_fan_out": true, "inference_mode": true, "lora_alpha": 32, "lora_dropout": 0.05, "merge_weights": false, "modules_to_save": null, "peft_type": "LORA", "r": 16, "target_modules": [ "c_attn" ], "task_type": "CAUSAL_LM" }