from transformers import PreTrainedModel, AutoModel
from .configuration_moral import BertItalianoConfig
import torch
class BertItaliano(PreTrainedModel):
    """Multi-task classifier for Italian text: one shared BERT encoder with
    two independent linear classification heads on the pooled output.

    The forward pass returns raw logits for both tasks; no loss is computed
    here, so callers apply their own criterion per head.
    """

    config_class = BertItalianoConfig

    def __init__(
        self,
        config,
        num_labels_1=6,
        num_labels_2=3,
        pretrained_name="dbmdz/bert-base-italian-xxl-uncased",
        dropout_prob=0.3,
    ):
        """Build the encoder and the two classification heads.

        Args:
            config: model configuration (must provide ``hidden_size``).
            num_labels_1: number of classes for the first head.
            num_labels_2: number of classes for the second head.
            pretrained_name: checkpoint used to initialize the encoder
                (default preserves the original hard-coded value).
            dropout_prob: dropout applied to the pooled output before the
                heads (default preserves the original 0.3).
        """
        super().__init__(config)
        self.num_labels1 = num_labels_1
        self.num_labels2 = num_labels_2
        # NOTE(review): downloading pretrained weights inside __init__ means
        # they are fetched on every construction and then overwritten by any
        # saved state_dict when this class is loaded via from_pretrained();
        # consider AutoModel.from_config(config) + explicit weight loading.
        self.bert = AutoModel.from_pretrained(pretrained_name)
        self.dropout = torch.nn.Dropout(dropout_prob)
        # Both heads share the same pooled representation of size hidden_size.
        self.linear_1 = torch.nn.Linear(config.hidden_size, num_labels_1)
        self.linear_2 = torch.nn.Linear(config.hidden_size, num_labels_2)

    def forward(self, input_ids, attention_mask=None, token_type_ids=None):
        """Run the encoder and both heads.

        Args:
            input_ids: token id tensor for the encoder.
            attention_mask: optional attention mask, forwarded to the encoder.
            token_type_ids: optional segment ids, forwarded to the encoder.

        Returns:
            Tuple ``(logits_1, logits_2)`` with shapes
            ``(batch, num_labels_1)`` and ``(batch, num_labels_2)``.
        """
        outputs = self.bert(
            input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
        )
        # Index 1 is the pooled ([CLS]-derived) output of the BERT encoder.
        pooled_output = outputs[1]
        pooled_output = self.dropout(pooled_output)
        logits_1 = self.linear_1(pooled_output)
        logits_2 = self.linear_2(pooled_output)
        return logits_1, logits_2