# Custom model: a dropout and a dense layer are added on top of DistilBERT to
# produce the final clarity output.
# Importing the libraries needed
import torch
from transformers import DistilBertModel, DistilBertTokenizer, PreTrainedModel
from configuration_essay_clarity import DistillBERTClassClarityConfig


class DistillBERTClassClarity(PreTrainedModel):
    config_class = DistillBERTClassClarityConfig

    def __init__(self, config):
        super().__init__(config)
        # Pretrained DistilBERT backbone (hidden size 768).
        self.l1 = DistilBertModel.from_pretrained("distilbert-base-uncased")
        # Dense + dropout head applied to the pooled [CLS] representation.
        self.pre_classifier = torch.nn.Linear(768, 768)
        self.dropout = torch.nn.Dropout(0.3)
        # Single output unit: one clarity score per essay.
        self.classifier = torch.nn.Linear(768, 1)
        # https://glassboxmedicine.com/2019/05/26/classification-sigmoid-vs-softmax/
        # self.softmax = torch.nn.Softmax(dim=1)
        # self.sigmoid = torch.nn.Sigmoid() # apply sigmoid on vector of 1*4

    def forward(self, ids=None, mask=None):
        # Run the DistilBERT backbone; output_1[0] is the last hidden state
        # with shape (batch_size, seq_len, 768).
        output_1 = self.l1(input_ids=ids, attention_mask=mask)
        hidden_state = output_1[0]
        # Take the hidden state of the first ([CLS]) token as a pooled
        # representation of the whole sequence.
        pooler = hidden_state[:, 0]
        pooler = self.pre_classifier(pooler)
        pooler = torch.nn.ReLU()(pooler)
        pooler = self.dropout(pooler)
        # Final dense layer produces a single raw (unbounded) clarity logit.
        output = self.classifier(pooler)
        # output = self.sigmoid(output)
        return output
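

# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of the original file): build the config
# and model, tokenize a single essay, and run one forward pass. This assumes
# DistillBERTClassClarityConfig can be constructed with its default arguments;
# adjust the kwargs to match configuration_essay_clarity.py.
if __name__ == "__main__":
    tokenizer = DistilBertTokenizer.from_pretrained("distilbert-base-uncased")
    config = DistillBERTClassClarityConfig()
    model = DistillBERTClassClarity(config)
    model.eval()

    encoded = tokenizer(
        "The essay states its thesis clearly and supports it with evidence.",
        padding="max_length",
        truncation=True,
        max_length=128,
        return_tensors="pt",
    )
    with torch.no_grad():
        score = model(ids=encoded["input_ids"], mask=encoded["attention_mask"])
    # One raw clarity logit per essay, shape (1, 1).
    print(score)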