danschr committed on
Commit
a3b24ec
1 Parent(s): 9f94f60

Upload DebertaArgClassifier

Browse files
config.json ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "architectures": [
3
+ "DebertaArgClassifier"
4
+ ],
5
+ "auto_map": {
6
+ "AutoConfig": "configuration_deberta_arg_classifier.DebertaConfig",
7
+ "AutoModelForSequenceClassification": "modeling_deberta_arg_classifier.DebertaArgClassifier"
8
+ },
9
+ "id2label": {
10
+ "0": "Self-direction: thought",
11
+ "1": "Self-direction: action",
12
+ "2": "Stimulation",
13
+ "3": "Hedonism",
14
+ "4": "Achievement",
15
+ "5": "Power: dominance",
16
+ "6": "Power: resources",
17
+ "7": "Face",
18
+ "8": "Security: personal",
19
+ "9": "Security: societal",
20
+ "10": "Tradition",
21
+ "11": "Conformity: rules",
22
+ "12": "Conformity: interpersonal",
23
+ "13": "Humility",
24
+ "14": "Benevolence: caring",
25
+ "15": "Benevolence: dependability",
26
+ "16": "Universalism: concern",
27
+ "17": "Universalism: nature",
28
+ "18": "Universalism: tolerance",
29
+ "19": "Universalism: objectivity"
30
+ },
31
+ "label2id": null,
32
+ "model_type": "deberta_multilabel",
33
+ "number_labels": 20,
34
+ "torch_dtype": "float32",
35
+ "transformers_version": "4.26.1"
36
+ }
configuration_deberta_arg_classifier.py ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ from transformers import PretrainedConfig
2
+
3
class DebertaConfig(PretrainedConfig):
    """Configuration for the multi-label DeBERTa argument classifier.

    Registered on the Hub via ``auto_map`` with
    ``model_type = "deberta_multilabel"``.
    """

    model_type = "deberta_multilabel"

    def __init__(self, num_labels: int = 20, **kwargs):
        # Stored under the distinct name `number_labels` so it does not
        # collide with the base class's own `num_labels` property, which
        # super().__init__ may also populate from **kwargs.
        self.number_labels = num_labels
        super().__init__(**kwargs)
modeling_deberta_arg_classifier.py ADDED
@@ -0,0 +1,34 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from transformers import PreTrainedModel, AutoModel
2
+ import torch.nn as nn
3
+ import torch
4
+ from deberta_arg_classifier.configuration_deberta_arg_classifier import DebertaConfig
5
+
6
+
7
class DebertaArgClassifier(PreTrainedModel):
    """Multi-label argument classifier: a DeBERTa encoder plus a linear head.

    The head maps the <CLS> embedding to ``config.number_labels`` scores
    (20 human-value labels in the accompanying config). ``forward`` returns
    sigmoid probabilities under the key ``"logits"`` and, when targets are
    supplied, a BCE-with-logits loss computed on the pre-sigmoid scores.
    """

    config_class = DebertaConfig

    def __init__(self, config):
        super().__init__(config)
        # NOTE(review): the backbone is loaded by name here, so
        # from_pretrained() first downloads microsoft/deberta-large and then
        # overwrites those weights with the saved state dict. Kept as-is for
        # checkpoint compatibility.
        self.bert = AutoModel.from_pretrained("microsoft/deberta-large")
        self.classifier = nn.Linear(self.bert.config.hidden_size, config.number_labels)
        self.criterion = nn.BCEWithLogitsLoss()

    def forward(self, input_ids, attention_mask, labels=None):
        """Encode the batch and score every label independently.

        Args:
            input_ids: token id tensor, shape (batch, seq_len).
            attention_mask: mask tensor with the same shape as ``input_ids``.
            labels: optional multi-hot float targets,
                shape (batch, number_labels) — assumed float for
                BCEWithLogitsLoss; TODO confirm against the training caller.

        Returns:
            dict with ``"logits"`` (sigmoid probabilities) and, when
            ``labels`` is given, ``"loss"``.
        """
        output = self.bert(input_ids, attention_mask=attention_mask)
        cls_embedding = self._cls_embeddings(output)
        # Loss must be computed on the raw scores: BCEWithLogitsLoss applies
        # its own sigmoid internally.
        raw_scores = self.classifier(cls_embedding)
        probs = torch.sigmoid(raw_scores)
        if labels is not None:
            # BUG FIX: the original read `self.cirterion` (typo), which raised
            # AttributeError on every training step that passed labels.
            loss = self.criterion(raw_scores, labels)
            return {"loss": loss, "logits": probs}
        return {"logits": probs}

    def _cls_embeddings(self, output):
        """Return the embedding of the <CLS> (first) token of each sequence."""
        last_hidden_state = output[0]  # (batch, seq_len, hidden_size)
        return last_hidden_state[:, 0]
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:7fef9869bc1b134b27c54ee1d90bc396dbecd058cdac65fadbaa6e75437e0a8a
3
- size 1620893171
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f563b917be492d981f2b7f518d2b9e5db5f2dda7be1f94ee0f77ff083723a3d4
3
+ size 1620866665