{
  "emb_dim": 2048,
  "n_layers": 12,
  "n_heads": 16,
  "dropout": 0.1,
  "attention_dropout": 0.1,
  "gelu_activation": true,
  "sinusoidal_embeddings": false,
  "asm": false,
  "bos_index": 0,
  "eos_index": 1,
  "pad_index": 2,
  "unk_index": 3,
  "mask_index": 5,
  "n_langs": 1,
  "n_words": 30145
}