{
  "activation_dropout": 0.0,
  "activation_function": "gelu",
  "architectures": [
    "PegasusForConditionalGeneration"
  ],
  "attention_dropout": 0.0,
  "d_model": 512,
  "decoder_attention_heads": 8,
  "decoder_ffn_dim": 3072,
  "decoder_layerdrop": 0.0,
  "decoder_layers": 12,
  "decoder_start_token_id": 0,
  "dropout": 0.15,
  "encoder_attention_heads": 8,
  "encoder_ffn_dim": 3072,
  "encoder_layerdrop": 0.0,
  "encoder_layers": 12,
  "eos_token_id": 1,
  "forced_eos_token_id": 1,
  "init_std": 0.02,
  "is_encoder_decoder": true,
  "max_position_embeddings": 1024,
  "model_type": "pegasus",
  "num_hidden_layers": 12,
  "pad_token_id": 0,
  "scale_embedding": false,
  "transformers_version": "4.30.2",
  "use_cache": true,
  "vocab_size": 32103
}
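
A minimal sketch of putting this configuration to use, assuming the JSON above is saved locally as `config.json` and the `transformers` library is installed (the file path is an assumption for illustration, not a published checkpoint):

```python
# Sketch: build a randomly initialized Pegasus model from the config above.
# Assumes the JSON is saved as "config.json" in the working directory.
from transformers import PegasusConfig, PegasusForConditionalGeneration

config = PegasusConfig.from_json_file("config.json")
model = PegasusForConditionalGeneration(config)  # fresh weights, nothing downloaded

print(config.d_model)          # 512
print(model.num_parameters())  # total parameter count for this architecture
```

Loading the weights of an actual trained checkpoint would instead go through `PegasusForConditionalGeneration.from_pretrained(...)`, which reads a `config.json` like this one alongside the weight files.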