from transformers import BertModel, BertConfig
import torch.nn as nn


class BertEmbeddingModel(nn.Module):
    """Thin wrapper around a pretrained HuggingFace ``BertModel``.

    Loads the model weights and config from ``bert_model_path`` and exposes
    the raw BERT outputs; gradient tracking can be toggled via ``set_grad``.
    """

    def __init__(self, bert_model_path):
        """Load the pretrained BERT model and its config from a local path
        or model identifier.

        Args:
            bert_model_path: Path or model id accepted by
                ``BertModel.from_pretrained`` / ``BertConfig.from_pretrained``.
        """
        super(BertEmbeddingModel, self).__init__()
        self.embedding_model = BertModel.from_pretrained(bert_model_path)
        self.bert_config = BertConfig.from_pretrained(bert_model_path)
        # Fine-tuning enabled by default; call set_grad(False) to freeze.
        self.embedding_model.requires_grad_(True)

    def set_grad(self, mode=True):
        """Enable (``mode=True``) or freeze (``mode=False``) gradients for
        all BERT parameters."""
        self.embedding_model.requires_grad_(mode)

    def forward(self, input_ids, token_type_ids=None, attention_mask=None):
        """Run BERT and return its full output object.

        Args:
            input_ids: Token id tensor, shape ``(batch, seq_len)``.
            token_type_ids: Optional segment id tensor, same shape.
            attention_mask: Optional padding mask tensor, same shape.

        Returns:
            The ``BertModel`` output (last hidden state, pooled output, ...).
        """
        # BUG FIX: BertModel.forward's positional order is
        # (input_ids, attention_mask, token_type_ids, ...). The original
        # positional call swapped token_type_ids and attention_mask.
        # Keyword arguments make the binding explicit and correct.
        outputs = self.embedding_model(
            input_ids=input_ids,
            token_type_ids=token_type_ids,
            attention_mask=attention_mask,
        )
        return outputs


