Gürkan Şahin (ETIYA) committed on
Commit
d8efeed
1 Parent(s): cc9aba3

config.json modified

Browse files
Files changed (1) hide show
  1. config.json +8 -29
config.json CHANGED
@@ -7,8 +7,16 @@
7
  "hidden_act": "gelu",
8
  "hidden_dropout_prob": 0.1,
9
  "hidden_size": 768,
 
 
 
 
10
  "initializer_range": 0.02,
11
  "intermediate_size": 3072,
 
 
 
 
12
  "layer_norm_eps": 1e-12,
13
  "max_position_embeddings": 512,
14
  "model_type": "bert",
@@ -18,32 +26,3 @@
18
  "type_vocab_size": 2,
19
  "vocab_size": 32000
20
  }
21
-
22
- {
23
- "architectures": [
24
- "BertForSequenceClassification"
25
- ],
26
- "attention_probs_dropout_prob": 0.1,
27
- "gradient_checkpointing": false,
28
- "hidden_act": "gelu",
29
- "hidden_dropout_prob": 0.1,
30
- "hidden_size": 768,
31
- "id2label": {
32
- "0": "positive",
33
- "1": "negative"
34
- },
35
- "initializer_range": 0.02,
36
- "intermediate_size": 3072,
37
- "label2id": {
38
- "positive": 0,
39
- "negative": 1
40
- },
41
- "layer_norm_eps": 1e-12,
42
- "max_position_embeddings": 512,
43
- "model_type": "bert",
44
- "num_attention_heads": 12,
45
- "num_hidden_layers": 12,
46
- "pad_token_id": 0,
47
- "type_vocab_size": 2,
48
- "vocab_size": 32000
49
- }
7
  "hidden_act": "gelu",
8
  "hidden_dropout_prob": 0.1,
9
  "hidden_size": 768,
10
+ "id2label": {
11
+ "0": "positive",
12
+ "1": "negative"
13
+ },
14
  "initializer_range": 0.02,
15
  "intermediate_size": 3072,
16
+ "label2id": {
17
+ "positive": 0,
18
+ "negative": 1
19
+ },
20
  "layer_norm_eps": 1e-12,
21
  "max_position_embeddings": 512,
22
  "model_type": "bert",
26
  "type_vocab_size": 2,
27
  "vocab_size": 32000
28
  }