{ "lora_alpha": 16, "lora_dropout": 0.05, "r": 8, "peft_type": "LORA", "task_type": "CAUSAL_LM", "bias": "none", "target_modules": [ "q_proj", "v_proj", "w1_proj", "w2_proj", "w3_proj" ], "experts": 8, "topk": 2 }