phlippseitz committed
Commit 304b6d3
1 Parent(s): a089641

Upload config.json with huggingface_hub

Files changed (1)
  1. config.json +80 -0
config.json ADDED
@@ -0,0 +1,80 @@
+{
+  "_name_or_path": "roberta-base",
+  "adapters": {
+    "adapters": {
+      "drop": "9076f36a74755ac4"
+    },
+    "config_map": {
+      "9076f36a74755ac4": {
+        "adapter_residual_before_ln": false,
+        "cross_adapter": false,
+        "factorized_phm_W": true,
+        "factorized_phm_rule": false,
+        "hypercomplex_nonlinearity": "glorot-uniform",
+        "init_weights": "bert",
+        "inv_adapter": null,
+        "inv_adapter_reduction_factor": null,
+        "is_parallel": false,
+        "learn_phm": true,
+        "leave_out": [],
+        "ln_after": false,
+        "ln_before": false,
+        "mh_adapter": false,
+        "non_linearity": "relu",
+        "original_ln_after": true,
+        "original_ln_before": true,
+        "output_adapter": true,
+        "phm_bias": true,
+        "phm_c_init": "normal",
+        "phm_dim": 4,
+        "phm_init_range": 0.0001,
+        "phm_layer": false,
+        "phm_rank": 1,
+        "reduction_factor": 16,
+        "residual_before_ln": true,
+        "scaling": 1.0,
+        "shared_W_phm": false,
+        "shared_phm_rule": true,
+        "use_gating": false
+      }
+    },
+    "fusion_config_map": {},
+    "fusions": {}
+  },
+  "architectures": [
+    "RobertaModelWithHeads"
+  ],
+  "attention_probs_dropout_prob": 0.1,
+  "bos_token_id": 0,
+  "classifier_dropout": null,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "max_position_embeddings": 514,
+  "model_type": "roberta",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "pad_token_id": 1,
+  "position_embedding_type": "absolute",
+  "prediction_heads": {
+    "drop": {
+      "activation_function": "tanh",
+      "head_type": "question_answering",
+      "label2id": {
+        "LABEL_0": 0,
+        "LABEL_1": 1
+      },
+      "layers": 1,
+      "num_labels": 2
+    }
+  },
+  "torch_dtype": "float32",
+  "transformers_version": "4.21.3",
+  "type_vocab_size": 1,
+  "use_cache": false,
+  "vocab_size": 50265
+}
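
For context: the "adapters" and "prediction_heads" sections of this config come from the adapter-transformers fork of transformers (consistent with the recorded "transformers_version": "4.21.3" and the "RobertaModelWithHeads" architecture). The file registers a single bottleneck adapter named "drop" (output adapter only, reduction_factor 16, relu non-linearity) together with a question-answering prediction head of the same name. Below is a minimal loading sketch under those assumptions; the repository id is a placeholder, since this commit page does not show the repo name.

# Minimal sketch, assuming the adapter-transformers fork is installed
# (pip install adapter-transformers), which installs itself as the
# "transformers" package and exposes AutoModelWithHeads.
from transformers import AutoModelWithHeads, AutoTokenizer

repo_id = "phlippseitz/<repo-name>"  # hypothetical placeholder

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelWithHeads.from_pretrained(repo_id)

# config.json stores the adapter and its QA head under the same name,
# so a single call activates both for inference.
model.set_active_adapters("drop")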