Update config.json
config.json CHANGED (+5 -10)
@@ -8,6 +8,11 @@
     "1": "neutral",
     "2": "contradiction"
   },
+  "label2id": {
+    "entailment": 0,
+    "neutral": 1,
+    "contradiction": 2
+  },
   "attention_probs_dropout_prob": 0.1,
   "classifiers_size": [
     [
@@ -1790,18 +1795,8 @@
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
   "hidden_size": 768,
-  "id2label": {
-    "0": "LABEL_0",
-    "1": "LABEL_1",
-    "2": "LABEL_2"
-  },
   "initializer_range": 0.02,
   "intermediate_size": 3072,
-  "label2id": {
-    "LABEL_0": 0,
-    "LABEL_1": 1,
-    "LABEL_2": 2
-  },
   "layer_norm_eps": 1e-07,
   "max_position_embeddings": 512,
   "max_relative_positions": -1,
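Before this commit, config.json apparently carried two competing label mappings: what looks like a meaningful NLI id2label block near the top (the context lines around 8-10 show "neutral" and "contradiction") and an auto-generated placeholder pair ("LABEL_0"/"LABEL_1"/"LABEL_2") at lines 1793-1804. Most JSON parsers, including Python's json module, keep the last occurrence of a duplicate key, so the placeholders would have shadowed the real labels. The commit adds label2id next to the meaningful id2label and deletes both placeholder blocks. A minimal sketch of how the mappings surface through transformers; the repo id is a placeholder, not taken from this commit:

```python
from transformers import AutoConfig

# Placeholder repo id; substitute the checkpoint this commit belongs to.
config = AutoConfig.from_pretrained("your-org/your-nli-model")

# With this commit applied, both mappings resolve to the NLI class names
# instead of the auto-generated "LABEL_0"/"LABEL_1"/"LABEL_2" defaults.
print(config.id2label)   # {0: 'entailment', 1: 'neutral', 2: 'contradiction'}
print(config.label2id)   # {'entailment': 0, 'neutral': 1, 'contradiction': 2}
```

Text-classification pipelines read id2label to name their outputs, so predictions from this checkpoint now report "entailment", "neutral", or "contradiction" rather than placeholder labels.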