myunusseker committed
Commit · 041dc95
1 Parent(s): d413d3f
add model

Browse files:
- config.json +17 -14
- distilbert_model.py +49 -0
- pytorch_model.bin +2 -2
- tokenizer.json +0 -0
config.json
CHANGED
@@ -5,6 +5,9 @@
     "DistilBertForMultilabelSequenceClassification"
   ],
   "attention_dropout": 0.1,
+  "auto_map": {
+    "AutoModelForSequenceClassification": "distilbert_model.DistilBertForMultilabelSequenceClassification"
+  },
   "dim": 768,
   "dropout": 0.1,
   "hidden_dim": 3072,
@@ -25,19 +28,19 @@
   },
   "initializer_range": 0.02,
   "label2id": {
-    "admiration": …,
-    "annoyance": …,
-    "approval": …,
-    "caring": …,
-    "confusion": …,
-    "curiosity": …,
-    "disapproval": …,
-    "excitement": …,
-    "joy": …,
-    "nervousness": …,
-    "neutral": …,
-    "optimism": …,
-    "sadness": …
+    "admiration": 0,
+    "annoyance": 1,
+    "approval": 2,
+    "caring": 3,
+    "confusion": 4,
+    "curiosity": 5,
+    "disapproval": 6,
+    "excitement": 7,
+    "joy": 8,
+    "nervousness": 9,
+    "neutral": 12,
+    "optimism": 10,
+    "sadness": 11
   },
   "max_position_embeddings": 512,
   "model_type": "distilbert",
@@ -49,6 +52,6 @@
   "sinusoidal_pos_embds": false,
   "tie_weights_": true,
   "torch_dtype": "float32",
-  "transformers_version": "4.…",
+  "transformers_version": "4.17.0",
   "vocab_size": 30522
 }
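The new auto_map entry points the Auto classes at the custom class shipped in distilbert_model.py. A minimal loading sketch, assuming a placeholder repo id (the actual repository name is not shown in this commit); trust_remote_code=True is needed so transformers will import the custom module from the repository:

from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Hypothetical repo id used only for illustration; replace with the actual repository.
repo_id = "myunusseker/<repo-name>"

tokenizer = AutoTokenizer.from_pretrained(repo_id)
# trust_remote_code=True lets transformers fetch distilbert_model.py from the repo
# and resolve DistilBertForMultilabelSequenceClassification via the auto_map entry.
model = AutoModelForSequenceClassification.from_pretrained(repo_id, trust_remote_code=True)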
distilbert_model.py
ADDED
@@ -0,0 +1,49 @@
+import torch
+import pandas as pd
+import numpy as np
+from pathlib import Path
+from transformers import DistilBertForSequenceClassification
+from transformers.modeling_outputs import SequenceClassifierOutput
+
+class DistilBertForMultilabelSequenceClassification(DistilBertForSequenceClassification):
+    def __init__(self, config):
+        super().__init__(config)
+
+    def forward(self,
+                input_ids=None,
+                attention_mask=None,
+                head_mask=None,
+                inputs_embeds=None,
+                labels=None,
+                output_attentions=None,
+                output_hidden_states=None,
+                return_dict=None):
+        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
+
+        outputs = self.distilbert(input_ids,
+                                  attention_mask=attention_mask,
+                                  head_mask=head_mask,
+                                  inputs_embeds=inputs_embeds,
+                                  output_attentions=output_attentions,
+                                  output_hidden_states=output_hidden_states,
+                                  return_dict=return_dict)
+
+        hidden_state = outputs[0]
+        pooled_output = hidden_state[:, 0]
+        pooled_output = self.dropout(pooled_output)
+        logits = self.classifier(pooled_output)
+
+        loss = None
+        if labels is not None:
+            loss_fct = torch.nn.BCEWithLogitsLoss()
+            loss = loss_fct(logits.view(-1, self.num_labels),
+                            labels.float().view(-1, self.num_labels))
+
+        if not return_dict:
+            output = (logits,) + outputs[2:]
+            return ((loss,) + output) if loss is not None else output
+
+        return SequenceClassifierOutput(loss=loss,
+                                        logits=logits,
+                                        hidden_states=outputs.hidden_states,
+                                        attentions=outputs.attentions)
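The subclass reuses the DistilBERT backbone and classifier head but swaps the single-label cross-entropy for torch.nn.BCEWithLogitsLoss, so each of the 13 emotion logits is scored independently. A minimal inference sketch under the same assumptions as above (placeholder repo id, plus an arbitrary 0.5 decision threshold that the repo itself does not prescribe):

import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

repo_id = "myunusseker/<repo-name>"  # placeholder, as above
tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForSequenceClassification.from_pretrained(repo_id, trust_remote_code=True)
model.eval()

inputs = tokenizer("I really admire how carefully this was put together!", return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits  # shape: (1, num_labels)

# Multilabel decoding: a sigmoid per logit instead of a softmax over classes.
probs = torch.sigmoid(logits)[0]
predicted = [model.config.id2label[i] for i, p in enumerate(probs) if p > 0.5]
print(predicted)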
pytorch_model.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:…
-size …
+oid sha256:c0007ac949fda38934a5c5428f8553ba86505512dbfc116399e205553f8460a9
+size 267894321
tokenizer.json
CHANGED
The diff for this file is too large to render.
See raw diff