{
  "adapter_layers": 32,
  "adapter_len": 128,
  "auto_mapping": null,
  "base_model_name_or_path": "meta-llama/Llama-2-7b-chat-hf",
  "inference_mode": true,
  "peft_type": "ADAPTION_PROMPT",
  "revision": null,
  "target_modules": "self_attn",
  "task_type": "CAUSAL_LM"
}