Andrey Kutuzov committed on
Commit
2eda15f
1 Parent(s): 347abbf
Files changed (2) hide show
  1. config.json +2 -1
  2. tokenizer.json +0 -0
config.json CHANGED
@@ -1,6 +1,6 @@
1
  {
2
  "architectures": [
3
- "BertModel"
4
  ],
5
  "attention_probs_dropout_prob": 0.1,
6
  "hidden_act": "gelu",
@@ -9,6 +9,7 @@
9
  "initializer_range": 0.02,
10
  "intermediate_size": 3072,
11
  "max_position_embeddings": 512,
 
12
  "num_attention_heads": 12,
13
  "num_hidden_layers": 12,
14
  "type_vocab_size": 2,
 
1
  {
2
  "architectures": [
3
+ "BertForMaskedLM"
4
  ],
5
  "attention_probs_dropout_prob": 0.1,
6
  "hidden_act": "gelu",
 
9
  "initializer_range": 0.02,
10
  "intermediate_size": 3072,
11
  "max_position_embeddings": 512,
12
+ "model_type": "bert",
13
  "num_attention_heads": 12,
14
  "num_hidden_layers": 12,
15
  "type_vocab_size": 2,
tokenizer.json DELETED
The diff for this file is too large to render. See raw diff