Splend1dchan committed on
Commit
327d25d
1 Parent(s): f4c9eed

update config

Files changed (1)
  1. config.json +36 -17
config.json CHANGED
@@ -1,19 +1,38 @@
 {
-  "model_type": "deberta",
-  "attention_probs_dropout_prob": 0.1,
-  "hidden_act": "gelu",
-  "hidden_dropout_prob": 0.1,
-  "hidden_size": 1024,
-  "initializer_range": 0.02,
-  "intermediate_size": 4096,
-  "max_position_embeddings": 512,
-  "relative_attention": true,
-  "pos_att_type": "c2p|p2c",
-  "layer_norm_eps": 1e-7,
-  "max_relative_positions": -1,
-  "position_biased_input": false,
-  "num_attention_heads": 16,
-  "num_hidden_layers": 24,
-  "type_vocab_size": 0,
-  "vocab_size": 50265
+  "_name_or_path": "Splend1dchan/deberta-large-slue-goldtrascription-e50",
+  "attention_probs_dropout_prob": 0.1,
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 1024,
+  "id2label": {
+    "0": "Negative",
+    "1": "Neutral",
+    "2": "Positive"
+  },
+  "initializer_range": 0.02,
+  "intermediate_size": 4096,
+  "label2id": {
+    "Negative": 0,
+    "Neutral": 1,
+    "Positive": 2
+  },
+  "layer_norm_eps": 1e-07,
+  "max_position_embeddings": 512,
+  "max_relative_positions": -1,
+  "model_type": "deberta",
+  "num_attention_heads": 16,
+  "num_hidden_layers": 24,
+  "pad_token_id": 0,
+  "pooler_dropout": 0,
+  "pooler_hidden_act": "gelu",
+  "pooler_hidden_size": 1024,
+  "pos_att_type": [
+    "c2p",
+    "p2c"
+  ],
+  "position_biased_input": false,
+  "relative_attention": true,
+  "transformers_version": "4.12.2",
+  "type_vocab_size": 0,
+  "vocab_size": 50265
 }
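
The updated config turns the bare DeBERTa-large backbone description into a three-class sentiment-classifier config: it records the source checkpoint in "_name_or_path", adds the id2label/label2id mapping and pooler settings, and pins "transformers_version": "4.12.2". A minimal sketch of inspecting and using the new config with the transformers library (assuming the Splend1dchan/deberta-large-slue-goldtrascription-e50 repo is public and ships matching weights and tokenizer files; the input sentence is purely illustrative):

from transformers import AutoConfig, AutoModelForSequenceClassification, AutoTokenizer

repo = "Splend1dchan/deberta-large-slue-goldtrascription-e50"

# The config alone exposes the label mapping added in this commit.
config = AutoConfig.from_pretrained(repo)
print(config.id2label)  # {0: 'Negative', 1: 'Neutral', 2: 'Positive'}

# AutoModelForSequenceClassification picks up num_labels=3 from id2label/label2id.
tokenizer = AutoTokenizer.from_pretrained(repo)
model = AutoModelForSequenceClassification.from_pretrained(repo)

inputs = tokenizer("this was a great talk", return_tensors="pt")  # hypothetical input
label_id = model(**inputs).logits.argmax(dim=-1).item()
print(config.id2label[label_id])

Note also the change of "pos_att_type" from the pipe-separated string "c2p|p2c" to a JSON list: Transformers' DebertaConfig normalizes the string form into a list when a config is loaded, so re-saving through a recent release (here 4.12.2) serializes it back as ["c2p", "p2c"] with unchanged behavior.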