patrickvonplaten committed on
Commit 3ae3bf9
1 Parent(s): 35dbb41

Update config.json

Files changed (1)
  1. config.json +8 -8
config.json CHANGED
@@ -2,7 +2,7 @@
   "architectures": [
     "Wav2Vec2ForMaskedLM"
   ],
-  "conv_bias": false,
+  "conv_bias": true,
   "conv_dim": [
     512,
     512,
@@ -30,22 +30,22 @@
     2,
     2
   ],
-  "do_stable_layer_norm": false,
+  "do_stable_layer_norm": true,
   "feat_extract_activation": "gelu",
   "feat_extract_dropout": 0.0,
-  "feat_extract_norm": "group",
+  "feat_extract_norm": "layer",
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
-  "hidden_size": 768,
+  "hidden_size": 1024,
   "initializer_range": 0.02,
-  "intermediate_size": 3072,
+  "intermediate_size": 4096,
   "layer_norm_eps": 1e-05,
   "model_type": "wav2vec2",
-  "num_attention_heads": 12,
+  "num_attention_heads": 16,
   "num_conv_pos_embedding_groups": 16,
   "num_conv_pos_embeddings": 128,
   "num_feat_extract_layers": 7,
-  "num_hidden_layers": 12,
+  "num_hidden_layers": 24,
   "transformers_version": "4.3.0.dev0",
   "vocab_size": 32
-}
+}
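
The commit swaps the base-sized wav2vec2 hyperparameters for the large-sized ones (layer-normalized feature extractor, 24 transformer layers, hidden size 1024). A minimal sketch for checking the updated values, assuming transformers (4.3 or later) is installed and the updated config.json sits in the working directory:

# Sketch: load the updated config.json and verify the large-model settings.
# Assumes `transformers` is installed and config.json is in the current directory.
from transformers import Wav2Vec2Config

config = Wav2Vec2Config.from_json_file("config.json")

# Fields flipped from the base-sized to the large-sized wav2vec2 setup in this commit.
assert config.conv_bias is True
assert config.do_stable_layer_norm is True
assert config.feat_extract_norm == "layer"
assert config.hidden_size == 1024
assert config.intermediate_size == 4096
assert config.num_attention_heads == 16
assert config.num_hidden_layers == 24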