{
  "architectures": [
    "CnlpModelForClassification"
  ],
  "cnlpt_version": "0.6.0",
  "encoder_config": {
    "_name_or_path": "microsoft/BiomedNLP-PubMedBERT-base-uncased-abstract",
    "add_cross_attention": false,
    "architectures": [
      "BertForMaskedLM"
    ],
    "attention_probs_dropout_prob": 0.1,
    "bad_words_ids": null,
    "bos_token_id": null,
    "chunk_size_feed_forward": 0,
    "classifier_dropout": null,
    "cross_attention_hidden_size": null,
    "decoder_start_token_id": null,
    "diversity_penalty": 0.0,
    "do_sample": false,
    "early_stopping": false,
    "encoder_no_repeat_ngram_size": 0,
    "eos_token_id": null,
    "exponential_decay_length_penalty": null,
    "finetuning_task": null,
    "forced_bos_token_id": null,
    "forced_eos_token_id": null,
    "hidden_act": "gelu",
    "hidden_dropout_prob": 0.1,
    "hidden_size": 768,
    "id2label": {
      "0": "LABEL_0",
      "1": "LABEL_1"
    },
    "initializer_range": 0.02,
    "intermediate_size": 3072,
    "is_decoder": false,
    "is_encoder_decoder": false,
    "label2id": {
      "LABEL_0": 0,
      "LABEL_1": 1
    },
    "layer_norm_eps": 1e-12,
    "length_penalty": 1.0,
    "max_length": 20,
    "max_position_embeddings": 512,
    "min_length": 0,
    "model_type": "bert",
    "no_repeat_ngram_size": 0,
    "num_attention_heads": 12,
    "num_beam_groups": 1,
    "num_beams": 1,
    "num_hidden_layers": 12,
    "num_return_sequences": 1,
    "output_attentions": false,
    "output_hidden_states": false,
    "output_scores": false,
    "pad_token_id": 0,
    "position_embedding_type": "absolute",
    "prefix": null,
    "problem_type": null,
    "pruned_heads": {},
    "remove_invalid_values": false,
    "repetition_penalty": 1.0,
    "return_dict": true,
    "return_dict_in_generate": false,
    "sep_token_id": null,
    "task_specific_params": null,
    "temperature": 1.0,
    "tf_legacy_loss": false,
    "tie_encoder_decoder": false,
    "tie_word_embeddings": true,
    "tokenizer_class": null,
    "top_k": 50,
    "top_p": 1.0,
    "torch_dtype": null,
    "torchscript": false,
    "transformers_version": "4.22.2",
    "type_vocab_size": 2,
    "typical_p": 1.0,
    "use_bfloat16": false,
    "use_cache": true,
    "vocab_size": 28903
  },
  "encoder_name": "microsoft/BiomedNLP-PubMedBERT-base-uncased-abstract",
  "finetuning_task": [
    "event"
  ],
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "hier_head_config": null,
  "label_dictionary": {
    "event": [
      "B-AFTER",
      "B-BEFORE",
      "B-BEFORE/OVERLAP",
      "B-OVERLAP",
      "I-AFTER",
      "I-BEFORE",
      "I-BEFORE/OVERLAP",
      "I-OVERLAP",
      "O"
    ]
  },
  "layer": 12,
  "model_type": "cnlpt",
  "num_rel_attention_heads": 12,
  "rel_attention_head_dims": 64,
  "relations": {
    "event": false
  },
  "tagger": {
    "event": true
  },
  "tokens": false,
  "torch_dtype": "float32",
  "transformers_version": "4.22.2",
  "use_prior_tasks": false,
  "vocab_size": 28903
}
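
The config above describes a cnlpt (model_type "cnlpt", cnlpt_version 0.6.0) model whose encoder is microsoft/BiomedNLP-PubMedBERT-base-uncased-abstract and whose single finetuning task, "event", is configured as a tagger (not a relation task) over BIO-style temporal labels. A minimal sketch of inspecting these fields with Python's standard json module follows; the local file name config.json is an assumption, adjust it to wherever the file was downloaded, and nothing beyond the keys shown above is assumed about the cnlpt library itself.

import json

# Hypothetical local path to the config shown above.
with open("config.json") as f:
    config = json.load(f)

print(config["model_type"])    # cnlpt
print(config["encoder_name"])  # microsoft/BiomedNLP-PubMedBERT-base-uncased-abstract
print(config["tagger"])        # {'event': True}  -> token-level tagging task

# The "event" task uses these BIO labels; build an id-to-label map,
# e.g. for decoding per-token predictions from the tagger head.
event_labels = config["label_dictionary"]["event"]
id2label = dict(enumerate(event_labels))
print(id2label[0], id2label[8])  # B-AFTER O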