{ "base_model_name_or_path": "facebook/opt-6.7b", "encoder_hidden_size": 4096, "inference_mode": true, "num_attention_heads": 32, "num_layers": 32, "num_transformer_submodules": 1, "num_virtual_tokens": 24, "peft_type": "PREFIX_TUNING", "prefix_projection": false, "task_type": "CAUSAL_LM", "token_dim": 4096 }