{ "config": { "alpha": 1, "architecture": "lora", "attn_matrices": [ "k", "v" ], "composition_mode": "scale", "dropout": 0.0, "init_weights": "ia3", "intermediate_lora": true, "output_lora": false, "r": 1, "selfattn_lora": true, "use_gating": false }, "hidden_size": 512, "model_class": "T5ForConditionalGeneration", "model_name": "mrm8488/t5-small-finetuned-squadv2", "model_type": "t5", "name": "ia3_adapter", "version": "3.2.1" }