yuewang-sf committed
Commit 8027b7c
Parent: 04a5651

Update config.json

Files changed (1)
  1. config.json +1 -9
config.json CHANGED
@@ -12,16 +12,9 @@
   "dropout_rate": 0.1,
   "eos_token_id": 2,
   "feed_forward_proj": "relu",
-  "gradient_checkpointing": false,
-  "id2label": {
-    "0": "LABEL_0"
-  },
   "initializer_factor": 1.0,
   "is_encoder_decoder": true,
   "is_gated_act": false,
-  "label2id": {
-    "LABEL_0": 0
-  },
   "layer_norm_epsilon": 1e-06,
   "model_type": "t5",
   "n_positions": 512,
@@ -32,9 +25,8 @@
   "pad_token_id": 0,
   "relative_attention_max_distance": 128,
   "relative_attention_num_buckets": 32,
-
   "torch_dtype": "float16",
   "transformers_version": "4.21.3",
   "use_cache": true,
   "vocab_size": 32100
-}
+}
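The trimmed keys are inert for this checkpoint: `id2label`/`label2id` are generic classification-head placeholders that a seq2seq T5 config does not use, and `gradient_checkpointing` is deprecated as a config field in recent transformers releases (checkpointing is toggled on the model instead, via `model.gradient_checkpointing_enable()`). Below is a minimal sketch of loading the cleaned file, assuming a local `config.json` and transformers 4.21+; the printed fallback labels are library defaults, not part of this commit:

```python
from transformers import T5Config

# Minimal sketch: parse the cleaned config straight from the JSON file.
# The local path "config.json" is an assumption for illustration.
config = T5Config.from_json_file("config.json")

print(config.model_type)   # "t5"
print(config.vocab_size)   # 32100
print(config.use_cache)    # True

# With "id2label"/"label2id" removed, PretrainedConfig regenerates
# generic placeholders on the fly, so dropping the spurious
# single-label mapping loses nothing.
print(config.id2label)     # e.g. {0: "LABEL_0", 1: "LABEL_1"}
```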