# coding=utf-8
# author: xusong <xusong28@jd.com>
# time: 2022/8/23 17:08

"""



interface = gr.Interface.load(
    "models/bert-base-uncased", api_key=None, alias="fill-mask"
)
"""

import gradio as gr
from transformers import FillMaskPipeline
from transformers import BertTokenizer
from modeling_kplug import KplugForMaskedLM

model_dir = "models/pretrain/"
tokenizer = BertTokenizer.from_pretrained(model_dir)
model = KplugForMaskedLM.from_pretrained(model_dir)


# Build the fill-mask pipeline once at startup instead of on every request.
fill_masker = FillMaskPipeline(model=model, tokenizer=tokenizer)


def fill_mask(text):
    """Return the top candidate tokens for the [MASK] position as {token: score}."""
    outputs = fill_masker(text)
    return {item["token_str"]: item["score"] for item in outputs}


# Chinese fill-mask examples from the e-commerce domain (kept in Chinese because
# the tokenizer/model are Chinese); e.g. the first one reads "This [MASK] dress
# is really pretty", where the expected answer is 衣 (completing 连衣裙, "dress").
mlm_examples = [
    "这款连[MASK]裙真漂亮",
    "这是杨[MASK]同款包包,精选优质皮料制作",
    "美颜去痘洁面[MASK]",
]

mlm_iface = gr.Interface(
    fn=fill_mask,
    inputs=gr.inputs.Textbox(
        label="输入文本",  # "Input text"
        default="这款连[MASK]裙真漂亮"),
    outputs=gr.Label(
        label="填词",  # "Predicted tokens"
        show_label=False,
    ),
    examples=mlm_examples,
    title="文本填词",  # "Masked word prediction"
    description='电商领域文本填词, 基于KPLUG预训练语言模型,'  # E-commerce fill-mask demo built on the KPLUG pretrained LM
                '<a href=""> K-PLUG: Knowledge-injected Pre-trained Language Model for Natural Language Understanding'
                ' and Generation in E-Commerce (Findings of EMNLP 2021) </a>。'
)

if __name__ == "__main__":
    mlm_iface.launch()
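    # Optional launch settings (a sketch; adjust to the deployment environment):
    # mlm_iface.launch(server_name="0.0.0.0", server_port=7860, share=False)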