system (HF staff) committed on
Commit 8943f66
1 Parent(s): 3bd1005

Update config.json

Files changed (1)
config.json (+9 -15)
config.json CHANGED
@@ -10,18 +10,17 @@
   "attention_dropout": 0.0,
   "bos_token_id": 0,
   "classif_dropout": 0.0,
-  "d_model": 1024,
-  "decoder_attention_heads": 16,
-  "decoder_ffn_dim": 4096,
+  "d_model": 24,
+  "decoder_attention_heads": 2,
+  "decoder_ffn_dim": 16,
   "decoder_layerdrop": 0.0,
-  "decoder_layers": 6,
+  "decoder_layers": 1,
   "decoder_start_token_id": 2,
   "dropout": 0.1,
-  "early_stopping": true,
-  "encoder_attention_heads": 16,
-  "encoder_ffn_dim": 4096,
+  "encoder_attention_heads": 2,
+  "encoder_ffn_dim": 16,
   "encoder_layerdrop": 0.0,
-  "encoder_layers": 12,
+  "encoder_layers": 2,
   "eos_token_id": 2,
   "id2label": {
     "0": "LABEL_0",
@@ -35,16 +34,11 @@
     "LABEL_1": 1,
     "LABEL_2": 2
   },
-  "length_penalty": 2.0,
-  "max_length": 142,
   "max_position_embeddings": 1024,
-  "min_length": 56,
   "model_type": "bart",
-  "no_repeat_ngram_size": 3,
   "normalize_before": false,
   "normalize_embedding": true,
-  "num_beams": 4,
-  "num_hidden_layers": 12,
+  "num_hidden_layers": 2,
   "output_past": true,
   "pad_token_id": 1,
   "prefix": " ",
@@ -60,5 +54,5 @@
       "num_beams": 4
     }
   },
-  "vocab_size": 50264
+  "vocab_size": 50265
 }
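
This change shrinks the BART architecture fields to tiny values (presumably so the repository serves as a small random-weight model for fast tests) and removes the top-level generation defaults (early_stopping, length_penalty, max_length, min_length, no_repeat_ngram_size, num_beams); task_specific_params still carries num_beams. Below is a minimal sketch of what the updated architecture amounts to, rebuilding the config by hand from the values in this diff (the repository id is not shown on this page, so from_pretrained is not used) and instantiating a randomly initialized model with the standard transformers classes for model_type "bart":

# Minimal sketch: reconstruct the updated tiny BART config from the values in this diff
# and instantiate a randomly initialized model to check its size.
from transformers import BartConfig, BartForConditionalGeneration

config = BartConfig(
    vocab_size=50265,            # changed from 50264
    d_model=24,                  # changed from 1024
    encoder_layers=2,            # changed from 12
    decoder_layers=1,            # changed from 6
    encoder_attention_heads=2,   # changed from 16
    decoder_attention_heads=2,   # changed from 16
    encoder_ffn_dim=16,          # changed from 4096
    decoder_ffn_dim=16,          # changed from 4096
    max_position_embeddings=1024,
)

model = BartForConditionalGeneration(config)
# Roughly 1.3M parameters, dominated by the 50,265 x 24 token-embedding table.
print(f"parameters: {model.num_parameters():,}")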