from torch import nn
from transformers import AutoTokenizer, AutoModelForPreTraining
import model.config as conf


class LongForm(nn.Module):
    """Sequence classifier: a pretrained backbone plus a linear softmax head.

    The backbone is loaded from ``conf.model_name_or_path`` via
    ``AutoModelForPreTraining``; classification uses the first token's
    (``[CLS]``-style) prediction logits pooled through a linear layer.
    """

    def __init__(self, n_model, cls_nu):
        """
        Args:
            n_model: width of the backbone's ``prediction_logits`` last
                dimension (input features of the linear head).
            cls_nu: number of output classes.
        """
        super().__init__()
        self.lf_model = AutoModelForPreTraining.from_pretrained(conf.model_name_or_path)
        self.fc = nn.Linear(n_model, cls_nu)

    def forward(self, x, atten_mask):
        """Classify each sequence in the batch.

        Args:
            x: token-id tensor, presumably (batch, seq_len) — TODO confirm.
            atten_mask: attention mask aligned with ``x``.

        Returns:
            Tensor of shape (batch, cls_nu) holding class probabilities
            (softmax already applied along dim 1).
        """
        # Pass the mask by keyword: HF forward() signatures carry many
        # optional parameters, so a positional mask is fragile across
        # transformers versions (same binding as the original positional call).
        output = self.lf_model(x, attention_mask=atten_mask)
        # Pool the sequence by taking only the first token's prediction logits.
        logits = self.fc(output.prediction_logits[:, 0, :])
        # NOTE(review): probabilities are returned directly, so the training
        # loss must be NLL on log-probabilities — CrossEntropyLoss here would
        # double-apply (log-)softmax. Confirm against the training loop.
        return nn.functional.softmax(logits, dim=1)


if __name__ == "__main__":
    # No standalone entry point: this module is intended to be imported,
    # and constructing LongForm requires the pretrained weights from
    # conf.model_name_or_path to be available.
    pass