haowei committed
Commit: 1e8a310
Parent: 9cb7040
Files changed (2)
  1. config.json +7 -2
  2. pytorch_model.bin +2 -2
config.json CHANGED
@@ -1,5 +1,7 @@
 {
-  "_name_or_path": "roberta-base",
+  "_name_or_path": "/home/haowei/haowei/continual-post-training/agnews_unsup_roberta",
+  "adapter_mode": "parallel",
+  "adapter_task": 5,
   "adapters": {
     "adapters": {},
     "config_map": {},
@@ -7,12 +9,14 @@
     "fusions": {}
   },
   "architectures": [
-    "RobertaModel"
+    "RobertaMaskForSequenceClassification"
   ],
   "attention_probs_dropout_prob": 0.1,
+  "attn_adapter_size": 512,
   "bos_token_id": 0,
   "classifier_dropout": null,
   "eos_token_id": 2,
+  "ffn_adapter_size": 768,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
   "hidden_size": 768,
@@ -25,6 +29,7 @@
   "num_hidden_layers": 12,
   "pad_token_id": 1,
   "position_embedding_type": "absolute",
+  "smax": 400,
   "torch_dtype": "float32",
   "transformers_version": "4.11.3",
   "type_vocab_size": 1,
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:afffc47bc6d52674826e4bf0b8d69c5b5a80fba0a182bac937cd4a0406cb959a
-size 498669495
+oid sha256:e17d312f7eeafb919e61b2e50fa0216dfe446599d1991c22e983aee7e9147c9c
+size 593931417
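Note: the checkpoint grows from 498,669,495 to 593,931,417 bytes (~95 MB), plausibly the added adapter parameters. Since a Git LFS oid under spec v1 is the SHA-256 digest of the file contents, a downloaded pytorch_model.bin can be checked against the pointer above; a minimal sketch:

import hashlib
import os

# Expected values taken directly from the LFS pointer in this commit.
EXPECTED_OID = "e17d312f7eeafb919e61b2e50fa0216dfe446599d1991c22e983aee7e9147c9c"
EXPECTED_SIZE = 593931417

def verify(path: str) -> bool:
    # Check the byte size first -- cheap, and catches truncated downloads.
    if os.path.getsize(path) != EXPECTED_SIZE:
        return False
    # Hash the file in 1 MiB chunks to avoid loading ~594 MB into memory.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest() == EXPECTED_OID

print(verify("pytorch_model.bin"))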