{ "vocab_size": 30522, "max_position_embeddings": 512, "sinusoidal_pos_embds": true, "n_layers": 6, "n_heads": 12, "dim": 768, "hidden_dim": 3072, "dropout": 0.1, "attention_dropout": 0.1, "activation": "gelu", "initializer_range": 0.02, "tie_weights_": true, "seq_classif_dropout": 0.2, "qa_dropout": 0.1 }