# Hugging Face Space: Japanese medical fill-mask demo
# (scraped page header "Spaces / Sleeping" removed — it was UI status text, not code)
import gradio as gr
from transformers import pipeline, AutoTokenizer

# Tokenizer for the Japanese medical RoBERTa model. The extra MeCab user
# dictionary (Manbyo) extends the word segmenter with medical vocabulary.
# NOTE(review): assumes MANBYO_201907_Dic-utf8.dic is present in the working
# directory of the Space — confirm the file is bundled with this repo.
tokenizer = AutoTokenizer.from_pretrained(
    "alabnii/jmedroberta-base-manbyo-wordpiece",
    mecab_kwargs={"mecab_option": "-u MANBYO_201907_Dic-utf8.dic"},
)

# Named `fill_mask` (not `pipeline`) so the imported `pipeline` factory from
# transformers is not shadowed by the pipeline instance it returns.
fill_mask = pipeline(
    "fill-mask",
    model="alabnii/jmedroberta-base-manbyo-wordpiece",
    tokenizer=tokenizer,
    top_k=20,
)


def fill(text):
    """Return a {token: score} mapping of the top-20 [MASK] predictions for *text*."""
    predictions = fill_mask(text)
    return {p["token_str"]: p["score"] for p in predictions}


demo = gr.Interface(
    fill,
    inputs="text",
    outputs=gr.Label(label="Output"),
    title="fill-mask",
    # Example sentence: "This patient was diagnosed with [MASK]."
    examples=[['この患者は[MASK]と診断された。']],
)

if __name__ == "__main__":
    demo.launch()