owaiskha9654 committed f8612ea
Parent(s): 976d61d

Update config.json

Files changed: config.json (+1 -1)
config.json CHANGED

@@ -21,7 +21,7 @@
   "layer_norm_eps": 1e-07,
   "max_position_embeddings": 512,
   "max_relative_positions": -1,
-  "model_type": "deberta-
+  "model_type": "deberta-v3",
   "norm_rel_ebd": "layer_norm",
   "num_attention_heads": 12,
   "num_hidden_layers": 6,
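For reference, a minimal sketch (not part of the commit) of how the edited file could be sanity-checked after this change; the local path config.json and the printed fields are assumptions based on the hunk above:

import json

# Load the edited configuration file (assumed to sit in the working directory).
with open("config.json") as f:
    config = json.load(f)

# The commit only touches the model_type field; the other hyperparameters
# shown in the hunk are unchanged.
print(config["model_type"])               # expected "deberta-v3" after this commit
print(config["num_hidden_layers"])        # 6
print(config["num_attention_heads"])      # 12
print(config["max_position_embeddings"])  # 512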