import torch
from transformers import BertTokenizer, BertForMaskedLM


def _test_fill_mask():
    """Smoke-test BERT's masked-LM head on a Chinese sentence.

    Loads a local ``chinese-bert-wwm`` checkpoint, feeds one sentence
    containing a single ``[MASK]`` token, and prints the most probable
    token for that position.
    """
    tokenizer = BertTokenizer.from_pretrained('model/chinese-bert-wwm')
    model = BertForMaskedLM.from_pretrained('model/chinese-bert-wwm')
    # NOTE: renamed from `input` to avoid shadowing the builtin.
    inputs = tokenizer.encode_plus('我想明天去[MASK]家吃饭.', return_tensors='pt')
    print('input--->', inputs)
    model.eval()
    # Inference only — skip autograd bookkeeping.
    with torch.no_grad():
        output = model(**inputs)
    print('output.logits.shape--->', output.logits.shape)  # [1, 12, 21128]
    # Locate [MASK] dynamically instead of hard-coding position 6, so the
    # test sentence can change without silently predicting the wrong slot.
    mask_pos = (inputs['input_ids'][0] == tokenizer.mask_token_id).nonzero(as_tuple=True)[0].item()
    mask_pred_idx = torch.argmax(output.logits[0][mask_pos]).item()
    print('概率最高的字:', tokenizer.convert_ids_to_tokens([mask_pred_idx]))  # ['她']


if __name__ == '__main__':
    # Run the masked-LM smoke test when executed as a script.
    _test_fill_mask()
