{
  "activation_dropout": 0.0,
  "activation_function": "gelu",
  "architectures": ["OPTModel"],
  "vocab_size": 50272,
  "max_position_embeddings": 2048,
  "num_layers": 24,
  "num_attention_heads": 16,
  "ffn_dim": 4096,
  "layerdrop": 0.0,
  "d_model": 1024,
  "embed_dim": 512,
  "dropout": 0.1,
  "attention_dropout": 0.0,
  "init_std": 0.02,
  "scale_embedding": false,
  "share_input_output_embed": true,
  "use_cache": false,
  "pad_token_id": 1,
  "bos_token_id": 0,
  "eos_token_id": 2,
  "decoder_start_token_id": 2,
  "forced_eos_token_id": 2
}