config.json (471 Bytes) — commit 610bcf0, "Update config.json"
{
  "backbone_checkpoint_name": "t5-large",
  "backbone_class": "T5ForConditionalGeneration",
  "backbone_hash": "baa014d5e81363ff48935ba78b5df374",
  "common_structure": null,
  "delta_type": "low_rank_adapter",
  "low_rank_rank": 1,
  "low_rank_w_init": "glorot-uniform",
  "modified_modules": [
    "SelfAttention",
    "DenseReluDense"
  ],
  "non_linearity": "gelu_new",
  "opendelta_version": "0.0.1",
  "reduction_factor": 32,
  "transformers_version": "4.17.0"
}
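
For illustration only, below is a minimal sketch of how a delta model matching this config could be attached to the t5-large backbone with OpenDelta. The config file itself does not contain this code; the class name LowRankAdapterModel, its keyword arguments, and the freeze_module call are assumptions inferred from the config keys and typical OpenDelta usage, so check them against the OpenDelta version listed above (0.0.1).

# Sketch: build a low-rank adapter delta mirroring this config (assumed API).
from transformers import T5ForConditionalGeneration
from opendelta import LowRankAdapterModel

# Load the backbone named by "backbone_checkpoint_name".
backbone = T5ForConditionalGeneration.from_pretrained("t5-large")

# Insert low-rank adapters into the modules listed in "modified_modules",
# using the hyperparameters recorded in the config.
delta_model = LowRankAdapterModel(
    backbone_model=backbone,
    modified_modules=["SelfAttention", "DenseReluDense"],
    reduction_factor=32,
    non_linearity="gelu_new",
    low_rank_w_init="glorot-uniform",
    low_rank_rank=1,
)

# Freeze the backbone so that only the inserted delta parameters are trained.
delta_model.freeze_module(exclude=["deltas"], set_state_dict=True)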