{"vocab_size": 21248, "hidden_size": 2048, "num_hidden_layers": 24, "num_attention_heads": 8, "hidden_act": "gelu_new", "intermediate_size": 8192, "hidden_dropout_prob": 0.1, "attention_probs_dropout_prob": 0.1, "max_position_embeddings": 512, "type_vocab_size": 2, "initializer_range": 0.2, "layer_norm_eps": 1e-12, "gradient_checkpointing": false, "position_embedding_type": "absolute", "use_cache": false, "model_type": "bert"}