# Create the customized model by adding a dropout and a dense layer on top of
# DistilBERT to produce the final output of the model.

# Import the libraries needed
import torch
from transformers import DistilBertModel, DistilBertTokenizer, PreTrainedModel

from configuration_essay_clarity import DistillBERTClassClarityConfig


class DistillBERTClassClarity(PreTrainedModel):
    config_class = DistillBERTClassClarityConfig

    def __init__(self, config):
        super().__init__(config)
        # Pretrained DistilBERT encoder used as the backbone.
        self.l1 = DistilBertModel.from_pretrained("distilbert-base-uncased")
        self.pre_classifier = torch.nn.Linear(768, 768)
        self.dropout = torch.nn.Dropout(0.3)
        # Single output unit: the model emits one clarity score per input.
        self.classifier = torch.nn.Linear(768, 1)
        # https://glassboxmedicine.com/2019/05/26/classification-sigmoid-vs-softmax/
        # self.softmax = torch.nn.Softmax(dim=1)
        # self.sigmoid = torch.nn.Sigmoid()  # apply sigmoid on a vector of 1*4

    def forward(self, ids=None, mask=None):
        output_1 = self.l1(input_ids=ids, attention_mask=mask)
        hidden_state = output_1[0]
        # Use the hidden state of the [CLS] token as the pooled representation.
        pooler = hidden_state[:, 0]
        pooler = self.pre_classifier(pooler)
        pooler = torch.nn.ReLU()(pooler)
        pooler = self.dropout(pooler)
        output = self.classifier(pooler)
        # output = self.sigmoid(output)
        return output
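
# ---------------------------------------------------------------------------
# Minimal usage sketch. Assumptions not confirmed by this file:
# DistillBERTClassClarityConfig can be instantiated with no arguments, and the
# raw (un-squashed) classifier score is the desired output. Adjust as needed.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    tokenizer = DistilBertTokenizer.from_pretrained("distilbert-base-uncased")
    config = DistillBERTClassClarityConfig()  # assumed default constructor
    model = DistillBERTClassClarity(config)
    model.eval()

    # Example essay text, purely illustrative.
    encoded = tokenizer(
        "This essay states its thesis clearly and supports it with evidence.",
        return_tensors="pt",
        padding=True,
        truncation=True,
        max_length=512,
    )
    with torch.no_grad():
        score = model(ids=encoded["input_ids"], mask=encoded["attention_mask"])
    print(score.shape)  # torch.Size([1, 1]): one clarity score per input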