from transformers import BertModel
import torch
# Install PyTorch via the Tsinghua PyPI mirror if needed:
#pip install torch -i https://pypi.tuna.tsinghua.edu.cn/simple
# Prefer GPU when available; everything below is moved to this device.
DEVICE = torch.device("cuda" if torch.cuda.is_available() else "cpu")
# Local Hugging Face cache snapshot of the google-bert/bert-base-chinese
# checkpoint. NOTE(review): hard-coded absolute Windows path — this will
# only work on this machine; consider reading it from config/env.
model_path = r"D:\pythonWork\python_demo\model\google-bert\bert-base-chinese\models--google-bert--bert-base-chinese\snapshots\c30a6ed22ab4564dc1e3b2ecbf6e766b0611a33f"
# Load the BERT encoder once at import time; the class below reuses this
# single shared instance rather than loading its own copy.
pretrained = BertModel.from_pretrained(model_path).to(DEVICE)
# print(pretrained)
class MyModel(torch.nn.Module):
    """Binary classifier head on top of the module-level BERT encoder.

    The shared ``pretrained`` encoder is used as a fixed feature
    extractor (its forward pass runs under ``torch.no_grad()``, so it
    receives no gradients here); only the linear head ``fc`` is trained.
    """

    def __init__(self):
        super().__init__()
        # Reuse the single encoder instance loaded at module import.
        self.pretrained = pretrained
        # Classification head: 768-dim [CLS] embedding -> 2 class logits.
        self.fc = torch.nn.Linear(768, 2)

    def forward(self, input_ids, attention_mask, token_type_ids):
        """Return per-class probabilities of shape (batch, 2)."""
        # Encode without gradient tracking so the backbone stays fixed.
        with torch.no_grad():
            encoded = self.pretrained(
                input_ids=input_ids,
                attention_mask=attention_mask,
                token_type_ids=token_type_ids,
            )
        # Classify from the [CLS] token's final hidden state.
        cls_vec = encoded.last_hidden_state[:, 0, :]
        logits = self.fc(cls_vec)
        # Normalize logits into probabilities over the 2 classes.
        return logits.softmax(dim=1)