T-Almeida committed on
Commit
9d919b5
1 Parent(s): 519e43a

Upload model

Browse files
Files changed (2) hide show
  1. config.json +1 -1
  2. modeling_bionexttagger.py +29 -2
config.json CHANGED
@@ -7,7 +7,7 @@
7
  "attention_probs_dropout_prob": 0.1,
8
  "augmentation": "unk",
9
  "auto_map": {
10
- "AutoConfig": "configuration_bionexttager.BioNextTaggerConfig",
11
  "AutoModel": "modeling_bionexttagger.BioNextTaggerModel"
12
  },
13
  "classifier_dropout": null,
 
7
  "attention_probs_dropout_prob": 0.1,
8
  "augmentation": "unk",
9
  "auto_map": {
10
+ "AutoConfig": "modeling_bionexttagger.BioNextTaggerConfig",
11
  "AutoModel": "modeling_bionexttagger.BioNextTaggerModel"
12
  },
13
  "classifier_dropout": null,
modeling_bionexttagger.py CHANGED
@@ -1,7 +1,7 @@
1
 
2
  import os
3
  from typing import Optional, Union
4
- from transformers import AutoModel, PreTrainedModel, AutoConfig, BertModel
5
  from transformers.modeling_outputs import TokenClassifierOutput
6
  from torch import nn
7
  from torch.nn import CrossEntropyLoss
@@ -10,8 +10,35 @@ from typing import List, Optional
10
 
11
  import torch
12
  from itertools import islice
13
- from .configuration_bionexttager import BioNextTaggerConfig
14
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
15
 
16
  NUM_PER_LAYER = 16
17
 
 
1
 
2
  import os
3
  from typing import Optional, Union
4
+ from transformers import AutoModel, PreTrainedModel, AutoConfig, BertModel, PretrainedConfig
5
  from transformers.modeling_outputs import TokenClassifierOutput
6
  from torch import nn
7
  from torch.nn import CrossEntropyLoss
 
10
 
11
  import torch
12
  from itertools import islice
 
13
 
14
class BioNextTaggerConfig(PretrainedConfig):
    """Configuration for the BioNext CRF-based tagger.

    Stores tagger-specific hyperparameters (data-augmentation settings and
    the CRF loss reduction) on top of the standard ``PretrainedConfig``
    fields; everything else is forwarded to the base class via ``**kwargs``.
    """

    model_type = "crf-tagger"

    def __init__(
        self,
        augmentation = "unk",
        context_size = 64,
        percentage_tags = 0.2,
        p_augmentation = 0.5,
        crf_reduction = "mean",
        version="0.1.1",
        **kwargs,
    ):
        # Tagger-specific knobs, kept as plain attributes so that
        # PretrainedConfig serializes them into config.json.
        self.augmentation = augmentation          # augmentation strategy name (e.g. "unk")
        self.context_size = context_size          # tokens of surrounding context fed to the tagger
        self.percentage_tags = percentage_tags    # fraction of tags targeted by augmentation
        self.p_augmentation = p_augmentation      # probability of applying augmentation per sample
        self.crf_reduction = crf_reduction        # reduction mode for the CRF loss ("mean", ...)
        self.version = version                    # config schema/model version string
        # Base-class init last, so remaining kwargs populate standard fields.
        super().__init__(**kwargs)

    def get_backbonemodel_config(self):
        """Build the backbone (encoder) config, overridden by local values.

        Loads the backbone's ``AutoConfig`` from ``self._name_or_path`` and
        copies over every field that this tagger config also defines, so the
        backbone picks up any locally-overridden hyperparameters.

        NOTE(review): requires ``_name_or_path`` to point at a resolvable
        checkpoint (hub id or local dir) — confirm against callers.
        """
        backbone_cfg = AutoConfig.from_pretrained(self._name_or_path)
        for field_name in backbone_cfg.to_dict():
            if hasattr(self, field_name):
                setattr(backbone_cfg, field_name, getattr(self, field_name))
        return backbone_cfg
42
 
43
  NUM_PER_LAYER = 16
44