phlippseitz committed
Commit c633f69
1 Parent(s): 9965291

Upload config.json with huggingface_hub

Files changed (1)
  1. config.json +128 -0
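The commit message above indicates the file was pushed with the huggingface_hub client; "Upload <file> with huggingface_hub" is the client's default commit message for a single-file upload. A minimal sketch of the kind of call that produces such a commit (the repo id is a placeholder, since the target repo is not shown in this view):

# Minimal sketch, assuming the huggingface_hub Python client.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="config.json",    # local file to push
    path_in_repo="config.json",       # destination path inside the repo
    repo_id="phlippseitz/<repo-id>",  # placeholder; actual repo id not shown here
)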
config.json ADDED
@@ -0,0 +1,128 @@
+ {
+   "_name_or_path": "facebook/bart-base",
+   "activation_dropout": 0.1,
+   "activation_function": "gelu",
+   "adapters": {
+     "adapters": {
+       "narrativeqa": "9076f36a74755ac4"
+     },
+     "config_map": {
+       "9076f36a74755ac4": {
+         "adapter_residual_before_ln": false,
+         "cross_adapter": false,
+         "factorized_phm_W": true,
+         "factorized_phm_rule": false,
+         "hypercomplex_nonlinearity": "glorot-uniform",
+         "init_weights": "bert",
+         "inv_adapter": null,
+         "inv_adapter_reduction_factor": null,
+         "is_parallel": false,
+         "learn_phm": true,
+         "leave_out": [],
+         "ln_after": false,
+         "ln_before": false,
+         "mh_adapter": false,
+         "non_linearity": "relu",
+         "original_ln_after": true,
+         "original_ln_before": true,
+         "output_adapter": true,
+         "phm_bias": true,
+         "phm_c_init": "normal",
+         "phm_dim": 4,
+         "phm_init_range": 0.0001,
+         "phm_layer": false,
+         "phm_rank": 1,
+         "reduction_factor": 16,
+         "residual_before_ln": true,
+         "scaling": 1.0,
+         "shared_W_phm": false,
+         "shared_phm_rule": true,
+         "use_gating": false
+       }
+     },
+     "fusion_config_map": {},
+     "fusions": {}
+   },
+   "add_bias_logits": false,
+   "add_final_layer_norm": false,
+   "architectures": [
+     "BartModelWithHeads"
+   ],
+   "attention_dropout": 0.1,
+   "bos_token_id": 0,
+   "classif_dropout": 0.1,
+   "classifier_dropout": 0.0,
+   "d_model": 768,
+   "decoder_attention_heads": 12,
+   "decoder_ffn_dim": 3072,
+   "decoder_layerdrop": 0.0,
+   "decoder_layers": 6,
+   "decoder_start_token_id": 2,
+   "dropout": 0.1,
+   "early_stopping": true,
+   "encoder_attention_heads": 12,
+   "encoder_ffn_dim": 3072,
+   "encoder_layerdrop": 0.0,
+   "encoder_layers": 6,
+   "eos_token_id": 2,
+   "forced_bos_token_id": 0,
+   "forced_eos_token_id": 2,
+   "gradient_checkpointing": false,
+   "id2label": {
+     "0": "LABEL_0",
+     "1": "LABEL_1",
+     "2": "LABEL_2"
+   },
+   "init_std": 0.02,
+   "is_encoder_decoder": true,
+   "label2id": {
+     "LABEL_0": 0,
+     "LABEL_1": 1,
+     "LABEL_2": 2
+   },
+   "max_position_embeddings": 1024,
+   "model_type": "bart",
+   "no_repeat_ngram_size": 3,
+   "normalize_before": false,
+   "normalize_embedding": true,
+   "num_beams": 4,
+   "num_hidden_layers": 6,
+   "pad_token_id": 1,
+   "prediction_heads": {
+     "narrativeqa": {
+       "activation_function": null,
+       "bias": false,
+       "head_type": "seq2seq_lm",
+       "label2id": null,
+       "layer_norm": false,
+       "layers": 1,
+       "shift_labels": false,
+       "vocab_size": 50265
+     }
+   },
+   "scale_embedding": false,
+   "task_specific_params": {
+     "summarization": {
+       "length_penalty": 1.0,
+       "max_length": 128,
+       "min_length": 12,
+       "num_beams": 4
+     },
+     "summarization_cnn": {
+       "length_penalty": 2.0,
+       "max_length": 142,
+       "min_length": 56,
+       "num_beams": 4
+     },
+     "summarization_xsum": {
+       "length_penalty": 1.0,
+       "max_length": 62,
+       "min_length": 11,
+       "num_beams": 6
+     }
+   },
+   "torch_dtype": "float32",
+   "transformers_version": "4.21.3",
+   "use_cache": false,
+   "vocab_size": 50265
+ }
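For context, this is an adapter-transformers checkpoint rather than a plain transformers one: the nested "adapters" block registers a single bottleneck adapter named "narrativeqa" (a Pfeiffer-style configuration: output adapter only, ReLU non-linearity, reduction factor 16, i.e. a bottleneck of 768 / 16 = 48 on top of BART-base), and "prediction_heads" adds a matching seq2seq LM head. A minimal usage sketch, assuming the legacy adapter-transformers fork around the recorded version 4.21.3 (which exports BartModelWithHeads, the class named in "architectures") and a placeholder repo id:

# Minimal sketch, not a verified recipe. Assumes `pip install adapter-transformers`,
# the fork that patches `transformers` and provides BartModelWithHeads.
from transformers import AutoTokenizer, BartModelWithHeads

REPO_ID = "phlippseitz/<repo-id>"  # placeholder; the repo id is not shown in this commit view

tokenizer = AutoTokenizer.from_pretrained("facebook/bart-base")
model = BartModelWithHeads.from_pretrained(REPO_ID)

# Activate the "narrativeqa" bottleneck adapter and select its seq2seq LM head.
model.set_active_adapters("narrativeqa")
model.active_head = "narrativeqa"

question = "Who is the protagonist of the story?"  # illustrative input
inputs = tokenizer(question, return_tensors="pt")

# Generation picks up the defaults recorded in the config above:
# num_beams=4, no_repeat_ngram_size=3, early_stopping=true, forced_bos_token_id=0.
output_ids = model.generate(**inputs, max_length=64)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))

Note that the "task_specific_params" block (summarization presets inherited from facebook/bart-base) is only applied when a pipeline or script explicitly selects one of those parameter sets; a plain generate() call uses the top-level generation defaults.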