import torch.nn as nn
from utils import init_params


class BaseSentencePositiveModel(nn.Module):
    """Sentence-level classification head on top of a pretrained encoder.

    Feeds the position-0 (typically [CLS]) hidden state of the encoder's
    last layer through a linear projection to produce class logits.
    """

    def __init__(self, base_model: nn.Module, pretrained_model_name: str, n_classes=2, **kwargs):
        """
        Args:
            base_model: pretrained encoder; its forward must return an object
                exposing ``hidden_states`` (NOTE(review): for HuggingFace
                models this presumably requires ``output_hidden_states=True``
                in the config — confirm at the call site).
            pretrained_model_name: used only to infer the hidden size:
                1024 when the name contains 'large', 768 otherwise.
            n_classes: number of output classes (default 2).
            **kwargs: accepted for call-site compatibility; unused here.
        """
        super().__init__()
        self.base = base_model

        # Hidden size is inferred from the checkpoint name rather than the
        # model config: 'large' variants use 1024-dim states, base ones 768.
        dim = 1024 if 'large' in pretrained_model_name else 768

        # Kept as a Sequential (even though it holds a single layer) so that
        # existing checkpoints with 'out.0.*' parameter keys still load.
        self.out = nn.Sequential(
            nn.Linear(dim, n_classes)
        )

        init_params([self.out])

    def forward(self, input_ids, attention_mask):
        """Return class logits of shape (batch, n_classes)."""
        encoder_output = self.base(input_ids=input_ids,
                                   attention_mask=attention_mask)

        # Last transformer layer; assumed (batch, seq_len, dim) — the [:, 0, :]
        # slice below fixes the rank to 3.
        last_hidden = encoder_output.hidden_states[-1]

        # Position-0 token (typically [CLS]) as the sentence representation.
        context_vector = last_hidden[:, 0, :]

        return self.out(context_vector)
