patrickvonplaten committed
Commit 87ff340
1 Parent(s): 8409890

Update config.json

Files changed (1)
  1. config.json +4 -4
config.json CHANGED
@@ -46,9 +46,9 @@
  "gradient_checkpointing": true,
  "hidden_act": "gelu",
  "hidden_dropout": 0.0,
- "hidden_size": 768,
+ "hidden_size": 384,
  "initializer_range": 0.02,
- "intermediate_size": 3072,
+ "intermediate_size": 1536,
  "layer_norm_eps": 1e-05,
  "layerdrop": 0.0,
  "mask_channel_length": 10,
@@ -65,11 +65,11 @@
  "model_type": "wav2vec2",
  "no_mask_channel_overlap": false,
  "no_mask_time_overlap": false,
- "num_attention_heads": 12,
+ "num_attention_heads": 6,
  "num_conv_pos_embedding_groups": 16,
  "num_conv_pos_embeddings": 128,
  "num_feat_extract_layers": 7,
- "num_hidden_layers": 12,
+ "num_hidden_layers": 6,
  "pad_token_id": 0,
  "transformers_version": "4.7.0.dev0"
 }
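
For reference, a minimal sketch (not part of this commit) of how the updated values map onto a Wav2Vec2Config from the Hugging Face transformers library. The commit halves the encoder width, feed-forward size, head count, and depth, and the check at the end confirms the new hidden_size of 384 still divides evenly across the 6 attention heads (64 dimensions per head).

# A minimal sketch, assuming the Hugging Face `transformers` library; it rebuilds
# the changed fields with Wav2Vec2Config to check the new geometry is consistent.
from transformers import Wav2Vec2Config

config = Wav2Vec2Config(
    hidden_size=384,            # was 768
    intermediate_size=1536,     # was 3072, kept at 4 * hidden_size
    num_attention_heads=6,      # was 12
    num_hidden_layers=6,        # was 12
    hidden_act="gelu",
    layer_norm_eps=1e-05,
    num_conv_pos_embedding_groups=16,
    num_conv_pos_embeddings=128,
    num_feat_extract_layers=7,
    pad_token_id=0,
)

# hidden_size must split evenly across the attention heads: 384 / 6 = 64 per head.
assert config.hidden_size % config.num_attention_heads == 0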