import torch
import torch.nn as nn
from transformers import BertTokenizer, BertForSequenceClassification,BertConfig,AutoTokenizer

class BERT(nn.Module):
    """Thin wrapper around a pretrained HuggingFace BERT sequence classifier.

    Loads ``BertForSequenceClassification`` weights via ``from_pretrained``
    and exposes only the classification logits from ``forward``.

    Parameters
    ----------
    embed_num : int, optional
        Vocabulary size for a from-scratch ``BertConfig``. Currently UNUSED:
        the from-scratch configuration path was disabled in favor of loading
        pretrained weights. Kept for backward compatibility with callers.
    max_position_embeddings : int, optional
        Maximum sequence length for a from-scratch ``BertConfig``. Currently
        UNUSED for the same reason as ``embed_num``.
    options_name : str, optional
        HuggingFace model identifier (or local path) passed to
        ``from_pretrained``.
    num_labels : int, optional
        Number of output classes for the classification head.
    """

    def __init__(self, embed_num=859, max_position_embeddings=110,
                 options_name="bert-base-uncased", num_labels=17):
        super().__init__()
        # Pretrained encoder + freshly initialized classification head sized
        # to `num_labels`.
        self.encoder = BertForSequenceClassification.from_pretrained(
            options_name, num_labels=num_labels
        )

    def forward(self, text):
        """Return classification logits for a batch of token-id inputs.

        Parameters
        ----------
        text : torch.Tensor
            Token-id tensor accepted as ``input_ids`` by the HuggingFace
            model — assumed shape (batch, seq_len); TODO confirm against
            the caller's tokenization pipeline.

        Returns
        -------
        torch.Tensor
            Logits of shape (batch, num_labels).
        """
        # No labels are supplied, so the model output carries logits only
        # (no loss is computed).
        output = self.encoder(text)
        return output.logits