abhishek (HF staff) committed
Commit 9001261
1 Parent(s): 3abdc6f

fix config

Files changed (1)
  1. config.json +1 -1
config.json CHANGED
@@ -16,7 +16,7 @@
   "intermediate_size": 3072,
   "layer_norm_eps": 1e-12,
   "max_position_embeddings": 512,
-  "model_type": "conv_bert",
+  "model_type": "convbert",
   "num_attention_heads": 12,
   "num_groups": 1,
   "num_hidden_layers": 12,