# Gradio Space: Japanese tokenization demo (nagisa)
import gradio as gr
import nagisa


def tokenize(text):
    """Tokenize Japanese *text* with nagisa.

    Returns a 2-tuple ``(words, postags)`` — the token surfaces and their
    part-of-speech tags as produced by ``nagisa.tagging`` (presumably two
    parallel lists; confirm against the nagisa API).
    """
    tokens = nagisa.tagging(text)
    return tokens.words, tokens.postags
# --- UI configuration for the Gradio interface built below ---
num_input_lines = 3  # height (in lines) of the input text box
input_placeholder = "Please input text here."
title = "Japanese tokenization demo"
default_text = "ここにテキストを入力し、Submit を押してください。"
description = (
    "This is a demo page for nagisa's tokenization. By entering text into the"
    " text box, you can verify the output results of nagisa. nagisa provides"
    " an easy-to-use feature for word segmentation and part-of-speech tagging"
    " in Japanese text. https://github.com/taishi-i/nagisa"
)
# Clickable example inputs shown under the interface.
examples = ["Pythonで簡単に使えるツールです", "3月に見た「3月のライオン」", "福岡・博多の観光情報"]
# Wire the tokenizer into a Gradio interface and start the web server.
# NOTE(review): gr.inputs.Textbox and the default= keyword are the legacy
# (pre-3.x) Gradio API; current Gradio uses gr.Textbox(..., value=...).
# Confirm the pinned gradio version before modernizing.
iface = gr.Interface(
    fn=tokenize,
    inputs=gr.inputs.Textbox(
        lines=num_input_lines,
        placeholder=input_placeholder,
        default=default_text,
    ),
    title=title,
    description=description,
    examples=examples,
    # Two text outputs: one for the words, one for the POS tags.
    outputs=["text", "text"],
)

iface.launch()