{
"config": {
"alpha": 16,
"architecture": "lora",
"attn_matrices": [
"q",
"v"
],
"composition_mode": "add",
"dropout": 0.0,
"init_weights": "lora",
"intermediate_lora": true,
"output_lora": true,
"r": 8,
"selfattn_lora": true,
"use_gating": false
},
"hidden_size": 1024,
"model_class": "T5ForConditionalGeneration",
"model_name": "google/flan-t5-large",
"model_type": "t5",
"name": "flant5-large-0ex-bridging-1epochs",
"version": "3.2.1"
}