plip committed on
Commit
652c3b1
1 Parent(s): aaa8314

Add id-mbertmodel-monotok-adapter

config.json ADDED
@@ -0,0 +1,64 @@
+{
+  "adapters": {
+    "adapters": {
+    },
+    "config_map": {
+      "d345095bcb01e325": {
+        "adapter_residual_before_ln": false,
+        "attention_type": null,
+        "invertible_adapter": null,
+        "leave_out": [],
+        "ln_after": false,
+        "ln_before": false,
+        "mh_adapter": false,
+        "new_attention_norm": null,
+        "non_linearity": "gelu",
+        "original_ln_after": true,
+        "original_ln_before": true,
+        "output_adapter": true,
+        "reduction_factor": 2,
+        "residual_before_ln": true
+      },
+      "text_lang": {
+        "adapter_residual_before_ln": false,
+        "attention_type": null,
+        "invertible_adapter": null,
+        "leave_out": [],
+        "ln_after": false,
+        "ln_before": false,
+        "mh_adapter": false,
+        "new_attention_norm": null,
+        "non_linearity": "gelu",
+        "original_ln_after": true,
+        "original_ln_before": true,
+        "output_adapter": true,
+        "reduction_factor": 2,
+        "residual_before_ln": true
+      }
+    }
+  },
+  "architectures": [
+    "BertForMaskedLM"
+  ],
+  "attention_probs_dropout_prob": 0.1,
+  "directionality": "bidi",
+  "embeddings_type": "full",
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-12,
+  "max_position_embeddings": 512,
+  "model_type": "bert",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "pad_token_id": 0,
+  "pooler_fc_size": 768,
+  "pooler_num_attention_heads": 12,
+  "pooler_num_fc_layers": 3,
+  "pooler_size_per_head": 128,
+  "pooler_type": "first_token_transform",
+  "type_vocab_size": 2,
+  "vocab_size": 30521
+}
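
The two config_map entries above describe the same bottleneck-adapter architecture in the adapter-transformers format: an adapter inserted after the feed-forward output only (mh_adapter false, output_adapter true), GeLU non-linearity, and reduction_factor 2, which with hidden_size 768 gives a 768 → 384 → 768 bottleneck. Vanilla transformers ignores these extra config keys and loads the checkpoint as a plain BertForMaskedLM (unexpected adapter weights, if any, are skipped with a warning); activating the adapters requires the adapter-transformers fork. A minimal loading sketch, assuming the repo is published on the Hub as plip/id-mbertmodel-monotok-adapter (inferred from the commit title, not confirmed by this diff):

# Minimal loading sketch; the repo id below is an assumption.
from transformers import AutoModelForMaskedLM, AutoTokenizer

repo = "plip/id-mbertmodel-monotok-adapter"  # assumed Hub path

tokenizer = AutoTokenizer.from_pretrained(repo)
model = AutoModelForMaskedLM.from_pretrained(repo)  # BertForMaskedLM per "architectures"

# "id" in the model name suggests Indonesian: "The capital of Indonesia is [MASK]."
inputs = tokenizer("Ibu kota Indonesia adalah [MASK].", return_tensors="pt")
logits = model(**inputs).logits

mask_pos = (inputs["input_ids"][0] == tokenizer.mask_token_id).nonzero(as_tuple=True)[0]
print(tokenizer.decode(logits[0, mask_pos].argmax(dim=-1)))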
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d911705f9169f4aadf47205f9ca5447920e9ecd3a2feb42e56192ea5da038803
+size 468900811
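
pytorch_model.bin is stored as a Git LFS pointer rather than the weights themselves: oid is the SHA-256 of the real file, and size is its byte count (468900811 bytes, about 447 MiB). As a sketch, a downloaded copy can be checked against the pointer:

# Sketch: verify a downloaded pytorch_model.bin against the LFS pointer above.
import hashlib
import os

EXPECTED_OID = "d911705f9169f4aadf47205f9ca5447920e9ecd3a2feb42e56192ea5da038803"
EXPECTED_SIZE = 468900811

def sha256_of(path: str, chunk: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        while block := f.read(chunk):
            h.update(block)
    return h.hexdigest()

path = "pytorch_model.bin"
assert os.path.getsize(path) == EXPECTED_SIZE
assert sha256_of(path) == EXPECTED_OID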
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+{"unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+{"do_lower_case": false, "max_len": 512, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
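
With do_lower_case set to false, the tokenizer is cased, consistent with a cased mBERT-style base. A small sketch of how these settings surface after loading, using the same assumed repo id as above:

# Sketch: the tokenizer_config.json settings above become tokenizer attributes.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("plip/id-mbertmodel-monotok-adapter")  # assumed Hub path
assert tokenizer.do_lower_case is False  # cased: input is not lowercased
print(tokenizer.tokenize("Jakarta"))     # case is preserved in the wordpieces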
vocab.txt ADDED
The diff for this file is too large to render. See raw diff