ma2za committed
Commit be9811f
1 Parent(s): 1ff2079

Upload RobertaEmotion

config.json CHANGED
@@ -2,12 +2,10 @@
   "architectures": [
     "RobertaEmotion"
   ],
- "attention_probs_dropout_prob": 0.1,
- "bos_token_id": 0,
- "classifier_dropout": null,
- "eos_token_id": 2,
- "hidden_act": "gelu",
- "hidden_dropout_prob": 0.1,
+ "auto_map": {
+   "AutoConfig": "configuration_roberta_emotion.RobertaEmotionConfig",
+   "AutoModel": "modeling_roberta_emotion.RobertaEmotion"
+ },
   "hidden_size": 768,
   "id2label": {
     "0": "sadness",
@@ -17,8 +15,6 @@
     "4": "fear",
     "5": "surprise"
   },
- "initializer_range": 0.02,
- "intermediate_size": 3072,
   "label2id": {
     "anger": 3,
     "fear": 4,
@@ -27,16 +23,7 @@
     "sadness": 0,
     "surprise": 5
   },
- "layer_norm_eps": 1e-05,
- "max_position_embeddings": 514,
- "model_type": "roberta",
- "num_attention_heads": 12,
- "num_hidden_layers": 12,
- "pad_token_id": 1,
- "position_embedding_type": "absolute",
+ "model_type": "ma2za/roberta-emotion",
   "torch_dtype": "float32",
- "transformers_version": "4.28.1",
- "type_vocab_size": 1,
- "use_cache": true,
- "vocab_size": 50265
+ "transformers_version": "4.28.1"
   }
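
The new auto_map block is what lets transformers resolve the custom classes added in this commit: AutoConfig is routed to RobertaEmotionConfig and AutoModel to RobertaEmotion, both shipped as Python files in the repo. A minimal loading sketch, assuming the Hub repo id is ma2za/roberta-emotion (executing repo-hosted code always requires trust_remote_code=True):

from transformers import AutoModel

# auto_map routes this call to modeling_roberta_emotion.RobertaEmotion;
# trust_remote_code=True opts in to running the Python files from the repo.
model = AutoModel.from_pretrained("ma2za/roberta-emotion", trust_remote_code=True)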
configuration_roberta_emotion.py ADDED
@@ -0,0 +1,7 @@
+ from transformers import PretrainedConfig
+
+ class RobertaEmotionConfig(PretrainedConfig):
+     model_type = "ma2za/roberta-emotion"
+
+     def __init__(self, **kwargs):
+         super().__init__(**kwargs)
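
RobertaEmotionConfig defines no fields of its own; it only pins the custom model_type and forwards everything else to PretrainedConfig, which stores unrecognized kwargs as plain attributes. A quick sketch of that behaviour, assuming the script sits next to the repo files:

from configuration_roberta_emotion import RobertaEmotionConfig

# PretrainedConfig.__init__ attaches unknown kwargs as attributes,
# so hidden_size and num_labels land directly on the config object.
config = RobertaEmotionConfig(hidden_size=768, num_labels=6)
print(config.model_type)   # ma2za/roberta-emotion
print(config.hidden_size)  # 768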
modeling_roberta_emotion.py ADDED
@@ -0,0 +1,32 @@
+ from torch.nn import CrossEntropyLoss
+
+ import torch
+
+ from transformers import AutoModel, PreTrainedModel
+
+ from transformers.modeling_outputs import SequenceClassifierOutput
+
+ from .configuration_roberta_emotion import RobertaEmotionConfig
+
+ class RobertaEmotion(PreTrainedModel):
+     config_class = RobertaEmotionConfig
+
+     def __init__(self, config):
+         super().__init__(config)
+         self.num_labels = config.num_labels
+         self.backbone = AutoModel.from_pretrained("roberta-base", config)
+         self.dropout = torch.nn.Dropout(p=0.1)
+         self.output = torch.nn.Linear(config.hidden_size, config.num_labels)
+
+     def forward(self, input_ids, labels=None, attention_mask=None):
+         model_output = self.backbone(input_ids)
+         hidden = model_output.last_hidden_state
+         logits = self.output(self.dropout(hidden[:, 0, :]))
+
+         loss = None
+         if labels is not None:
+             labels = labels.to(logits.device)
+             loss_fct = CrossEntropyLoss()
+             loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1))
+
+         return SequenceClassifierOutput(loss=loss, logits=logits)
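
RobertaEmotion classifies the hidden state of the first (<s>) token, and the backbone weights are fetched from roberta-base at construction time, which is why config.json no longer needs to carry the RoBERTa hyperparameters. A hedged end-to-end sketch, assuming the roberta-base tokenizer and the Hub id ma2za/roberta-emotion; note that this forward ignores attention_mask, so a single unpadded sequence is the safe case:

import torch
from transformers import AutoModel, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("roberta-base")  # assumption: backbone tokenizer
model = AutoModel.from_pretrained("ma2za/roberta-emotion", trust_remote_code=True)
model.eval()

inputs = tokenizer("I did not expect to enjoy this so much!", return_tensors="pt")
with torch.no_grad():
    logits = model(input_ids=inputs["input_ids"]).logits

# id2label keys are converted to ints by PretrainedConfig on load
print(model.config.id2label[logits.argmax(dim=-1).item()])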
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
   version https://git-lfs.github.com/spec/v1
- oid sha256:d37bec16044b9e2af2580094e0e2952d869d7db188b9b6a6b7c80eb2ffd3b663
+ oid sha256:680e4142e5ffd42a300aa54b976193bfc499640ad8161ce83044dc273d128d80
   size 498674549