iuliaem committed on
Commit
807a2e6
1 Parent(s): 06a778a

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +70 -72
config.json CHANGED
@@ -1,73 +1,71 @@
1
  {
2
- "_name_or_path": "facebook/bart-large-xsum",
3
- "hidden_size": 32,
4
- "num_channels": 3,
5
- "num_classes": 10,
6
- "model_type": "bart",
7
- "architectures": ["BartForConditionalGeneration"],
8
- "tokenizer_class": "BartTokenizer",
9
- "d_model": 1024,
10
- "encoder_layers": 12,
11
- "decoder_layers": 12,
12
- "vocab_size": 50265,
13
- "activation_function": "gelu",
14
- "_num_labels": 3,
15
- "transformers_version": "4.41.2",
16
- "activation_dropout": 0.0,
17
- "add_bias_logits": false,
18
- "add_final_layer_norm": false,
19
- "attention_dropout": 0.0,
20
- "bos_token_id": 0,
21
- "classif_dropout": 0.0,
22
- "classifier_dropout": 0.0,
23
- "d_model": 1024,
24
- "decoder_attention_heads": 16,
25
- "decoder_ffn_dim": 4096,
26
- "decoder_layerdrop": 0.0,
27
- "decoder_start_token_id": 2,
28
- "dropout": 0.1,
29
- "early_stopping": true,
30
- "encoder_attention_heads": 16,
31
- "encoder_ffn_dim": 4096,
32
- "encoder_layerdrop": 0.0,
33
- "encoder_layers": 12,
34
- "eos_token_id": 2,
35
- "eos_token_ids": [
36
- 2
37
- ],
38
- "forced_eos_token_id": 2,
39
- "gradient_checkpointing": false,
40
- "id2label": {
41
- "0": "LABEL_0",
42
- "1": "LABEL_1",
43
- "2": "LABEL_2"
44
- },
45
- "init_std": 0.02,
46
- "is_encoder_decoder": true,
47
- "label2id": {
48
- "LABEL_0": 0,
49
- "LABEL_1": 1,
50
- "LABEL_2": 2
51
- },
52
- "max_length": 62,
53
- "max_position_embeddings": 1024,
54
- "min_length": 11,
55
- "no_repeat_ngram_size": 3,
56
- "normalize_before": false,
57
- "normalize_embedding": true,
58
- "num_beams": 6,
59
- "num_hidden_layers": 12,
60
- "output_past": true,
61
- "pad_token_id": 1,
62
- "prefix": " ",
63
- "replacing_rate": 0,
64
- "scale_embedding": false,
65
- "static_position_embeddings": false,
66
- "student_decoder_layers": null,
67
- "student_encoder_layers": null,
68
- "task_specific_params": {},
69
- "torch_dtype": "float32",
70
- "use_cache": true,
71
- }
72
-
73
-
 
1
  {
2
+ "_name_or_path": "facebook/bart-large-xsum",
3
+ "hidden_size": 32,
4
+ "num_channels": 3,
5
+ "num_classes": 10,
6
+ "model_type": "bart",
7
+ "architectures": [
8
+ "BartForConditionalGeneration"
9
+ ],
10
+ "tokenizer_class": "BartTokenizer",
11
+ "d_model": 1024,
12
+ "decoder_layers": 12,
13
+ "vocab_size": 50265,
14
+ "activation_function": "gelu",
15
+ "_num_labels": 3,
16
+ "transformers_version": "4.41.2",
17
+ "activation_dropout": 0,
18
+ "add_bias_logits": false,
19
+ "add_final_layer_norm": false,
20
+ "attention_dropout": 0,
21
+ "bos_token_id": 0,
22
+ "classif_dropout": 0,
23
+ "classifier_dropout": 0,
24
+ "decoder_attention_heads": 16,
25
+ "decoder_ffn_dim": 4096,
26
+ "decoder_layerdrop": 0,
27
+ "decoder_start_token_id": 2,
28
+ "dropout": 0.1,
29
+ "early_stopping": true,
30
+ "encoder_attention_heads": 16,
31
+ "encoder_ffn_dim": 4096,
32
+ "encoder_layerdrop": 0,
33
+ "encoder_layers": 12,
34
+ "eos_token_id": 2,
35
+ "eos_token_ids": [
36
+ 2
37
+ ],
38
+ "forced_eos_token_id": 2,
39
+ "gradient_checkpointing": false,
40
+ "id2label": {
41
+ "0": "LABEL_0",
42
+ "1": "LABEL_1",
43
+ "2": "LABEL_2"
44
+ },
45
+ "init_std": 0.02,
46
+ "is_encoder_decoder": true,
47
+ "label2id": {
48
+ "LABEL_0": 0,
49
+ "LABEL_1": 1,
50
+ "LABEL_2": 2
51
+ },
52
+ "max_length": 62,
53
+ "max_position_embeddings": 1024,
54
+ "min_length": 11,
55
+ "no_repeat_ngram_size": 3,
56
+ "normalize_before": false,
57
+ "normalize_embedding": true,
58
+ "num_beams": 6,
59
+ "num_hidden_layers": 12,
60
+ "output_past": true,
61
+ "pad_token_id": 1,
62
+ "prefix": " ",
63
+ "replacing_rate": 0,
64
+ "scale_embedding": false,
65
+ "static_position_embeddings": false,
66
+ "student_decoder_layers": null,
67
+ "student_encoder_layers": null,
68
+ "task_specific_params": {},
69
+ "torch_dtype": "float32",
70
+ "use_cache": true
71
+ }