{
  "architectures": [
    "BARTModel",
    "TransformerModel"
  ],
  "task": "translation",
  "source_lang": "en",
  "target_lang": "en",
  "dropout": 0.1,
  "attention_dropout": 0.1,
  "activation_dropout": 0.0,
  "encoder_layers": 12,
  "decoder_layers": 12,
  "encoder_embed_dim": 1024,
  "decoder_embed_dim": 1024,
  "encoder_ffn_embed_dim": 4096,
  "decoder_ffn_embed_dim": 4096,
  "encoder_attention_heads": 16,
  "decoder_attention_heads": 16,
  "encoder_normalize_before": false,
  "decoder_normalize_before": false,
  "encoder_learned_pos": true,
  "decoder_learned_pos": true,
  "share_decoder_input_output_embed": true,
  "share_all_embeddings": true,
  "static_position_embeddings": false,
  "adaptive_input": false,
  "adaptive_input_factor": 4,
  "adaptive_input_cutoff": null,
  "no_scale_embedding": true,
  "layernorm_embedding": true,
  "tie_adaptive_weights": false,
  "tie_encoder_decoder": false,
  "remove_bpe": null,
  "eos_token": "</s>",
  "bos_token": "<s>",
  "pad_token": "<pad>",
  "unk_token": "<unk>",
  "mask_token": "<mask>",
  "layernorm_eps": 1e-05,
  "transformers_version": "4.14.0",
  "fairseq_version": "0.10.4",
  "model_type": "bart",
  "revision": "main",
  "use_cache": true
}
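Most of the hyperparameters above follow fairseq's argument naming (`encoder_embed_dim`, `encoder_ffn_embed_dim`, `no_scale_embedding`, ...) rather than the Hugging Face field names. Below is a minimal sketch, assuming the checkpoint is meant to be used through the standard BART port in `transformers`, of how these values could be mapped onto a `BartConfig`; the argument names on the `BartConfig` side are the usual transformers equivalents and are not fields of this file.

```python
from transformers import BartConfig

# Sketch only: map the fairseq-style hyperparameters from the config above
# onto the corresponding BartConfig arguments.
config = BartConfig(
    d_model=1024,                  # encoder_embed_dim / decoder_embed_dim
    encoder_layers=12,
    decoder_layers=12,
    encoder_attention_heads=16,
    decoder_attention_heads=16,
    encoder_ffn_dim=4096,          # encoder_ffn_embed_dim
    decoder_ffn_dim=4096,          # decoder_ffn_embed_dim
    dropout=0.1,
    attention_dropout=0.1,
    activation_dropout=0.0,
    scale_embedding=False,         # no_scale_embedding: true
    use_cache=True,
)
```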