KhaldiAbderrhmane committed
Commit 470ed02
Parent(s): 83d1a46

Update config.json

Files changed: config.json (+2 -2)
config.json CHANGED

@@ -49,7 +49,7 @@
     "final_dropout": 0.1,
     "hidden_act": "gelu",
     "hidden_dropout": 0.1,
-    "
+    "hidden_size_lstm": 128,
     "initializer_range": 0.02,
     "intermediate_size": 3072,
     "layer_norm_eps": 1e-05,
@@ -61,7 +61,7 @@
     "mask_time_min_masks": 2,
     "mask_time_prob": 0.05,
     "model_type": "hubert",
-    "num_attention_heads":
+    "num_attention_heads": 12,
     "num_classes": 6,
     "num_conv_pos_embedding_groups": 16,
     "num_conv_pos_embeddings": 128,