tmills committed on
Commit 86cf823
1 Parent(s): 2cdabf4

Updated to use latest version of cnlpt, which saves model params in the config file.
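As a rough illustration (not part of this commit), the cnlpt-specific parameters now stored in config.json can be read back through the standard transformers config loader; the local path "." is a placeholder for wherever this repo is checked out:

from transformers import AutoConfig

# Load config.json from a local checkout of this repo (path is a placeholder).
config = AutoConfig.from_pretrained(".")

# Keys in config.json that are not standard RobertaConfig fields are attached
# to the loaded config as attributes, so the cnlpt parameters saved by this
# commit are available directly:
print(config.layer)                    # 11
print(config.num_rel_attention_heads)  # 12
print(config.rel_attention_head_dims)  # 64
print(config.freeze, config.tokens)    # False False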

added_tokens.json CHANGED
@@ -1 +1 @@
- {"</a2>": 50270, "<e>": 50265, "<cr>": 50271, "</a1>": 50268, "<a1>": 50267, "<neg>": 50272, "<a2>": 50269, "</e>": 50266}
+ {"<a1>": 50267, "<cr>": 50271, "</a1>": 50268, "<e>": 50265, "</e>": 50266, "<neg>": 50272, "<a2>": 50269, "</a2>": 50270}
config.json CHANGED
@@ -9,20 +9,25 @@
  "finetuning_task": [
  "event"
  ],
+ "freeze": false,
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "initializer_range": 0.02,
  "intermediate_size": 3072,
+ "layer": 11,
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "roberta",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
+ "num_rel_attention_heads": 12,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
- "transformers_version": "4.4.0.dev0",
+ "rel_attention_head_dims": 64,
+ "tokens": false,
+ "transformers_version": "4.4.2",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 50273
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:00a280c01d55f72bc9ecc22d6a5e6ff3f315161aa7c468486279f61558a2ae24
+ oid sha256:6a67fffd8017200ed128cf604d230dd213be714e71f5cd6501852010967f0397
  size 501053685
tokenizer_config.json CHANGED
@@ -1 +1 @@
- {"unk_token": "<unk>", "bos_token": "<s>", "eos_token": "</s>", "add_prefix_space": true, "errors": "replace", "sep_token": "</s>", "cls_token": "<s>", "pad_token": "<pad>", "mask_token": "<mask>", "additional_special_tokens": ["<e>", "</e>", "<a1>", "</a1>", "<a2>", "</a2>", "<cr>", "<neg>"], "model_max_length": 512, "name_or_path": "roberta-base"}
+ {"unk_token": "<unk>", "bos_token": "<s>", "eos_token": "</s>", "add_prefix_space": true, "errors": "replace", "sep_token": "</s>", "cls_token": "<s>", "pad_token": "<pad>", "mask_token": "<mask>", "additional_special_tokens": ["<e>", "</e>", "<a1>", "</a1>", "<a2>", "</a2>", "<cr>", "<neg>"], "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "roberta-base"}