kplug/kplug_lm_test.py
# coding=utf-8
# author: xusong <xusong28@jd.com>
# time: 2021/9/17 17:43
import torch
from transformers import BertTokenizer
from modeling_kplug import KplugForMaskedLM

# load the KPLUG tokenizer and masked-LM checkpoint from a local directory
model_dir = "../../models/hugging_face/kplug/"
tokenizer = BertTokenizer.from_pretrained(model_dir)
model = KplugForMaskedLM.from_pretrained(model_dir)

# encode an example sentence ("this dress is really pretty") and run a masked-LM forward pass
input_ids = torch.tensor(tokenizer.encode("这款连衣裙真漂亮", add_special_tokens=True)).unsqueeze(0)
outputs = model(input_ids)
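
# Minimal sketch for inspecting the raw predictions, assuming KplugForMaskedLM
# follows the usual Hugging Face masked-LM interface and returns per-token
# prediction logits of shape (batch, seq_len, vocab_size).
logits = outputs.logits if hasattr(outputs, "logits") else outputs[0]
predicted_ids = logits.argmax(dim=-1)
print(tokenizer.decode(predicted_ids[0]))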

# fill mask with the FillMaskPipeline wrapper
from transformers import FillMaskPipeline

fill_masker = FillMaskPipeline(model=model, tokenizer=tokenizer)
outputs = fill_masker("这款连[MASK]裙真漂亮")
print(outputs)
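
# FillMaskPipeline normally returns one dict per candidate with keys such as
# "token_str", "score" and "sequence"; printing them line by line is easier to read.
for cand in outputs:
    print(cand["token_str"], round(cand["score"], 4), cand["sequence"])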

# the same model can also be wired into the generic pipeline factory
# (top_k is the keyword argument name in current transformers releases)
from transformers import pipeline

nlp_fill = pipeline('fill-mask', model=model, tokenizer=tokenizer, top_k=10)
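
# example call for the pipeline factory instance defined above
outputs = nlp_fill("这款连[MASK]裙真漂亮")
print(outputs)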


def from_url():
    # attempt to load model weights directly from the remote checkpoint URL
    MODEL_PATH = "http://storage.jd.com/language-models/kplug/huggingface/pytorch_model.bin"
    model = KplugForMaskedLM.from_pretrained(MODEL_PATH)
    return model
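

# Alternative sketch (assumption): if from_pretrained cannot consume a bare
# pytorch_model.bin URL in the installed transformers version, the remote state
# dict can be fetched with torch.hub and loaded into a model built from the
# local config instead. from_url_state_dict is a hypothetical helper name.
def from_url_state_dict(url="http://storage.jd.com/language-models/kplug/huggingface/pytorch_model.bin"):
    state_dict = torch.hub.load_state_dict_from_url(url, map_location="cpu")
    return KplugForMaskedLM.from_pretrained(model_dir, state_dict=state_dict)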