{ "adapter_path": "adapters", "lora_layers": 8, "lora_parameters": { "rank": 16, "alpha": 16, "dropout": 0.0, "scale": 1.0 } }