import gradio as gr

import bap_preprocessing


def tokenize(sentence):
    # Delegate to the project-specific tokenizer and return its output.
    return bap_preprocessing.tokenize(sentence)


with gr.Blocks() as demo:
    gr.Markdown(
        """
        # Tokenizer
        """
    )
    input_s = gr.Textbox(placeholder="Sentence to be tokenized.", label="Sentence")
    output = gr.Textbox(label="Tokens")
    # gr.Button takes its label as the first positional argument (value),
    # not a text= keyword.
    submit = gr.Button("Tokenize")
    submit.click(fn=tokenize, inputs=input_s, outputs=output)
    examples = gr.Examples(
        ["Ben oraya geliyorum.", "Sen neden gelmiyorsun?"],
        inputs=input_s,
    )

demo.launch()
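
Note: bap_preprocessing is a project-specific module and may not be installed locally. If it is unavailable, a minimal stand-in like the sketch below, saved as bap_preprocessing.py next to the app, lets the demo run for testing. The whitespace-and-punctuation splitting here is an assumption for illustration, not the module's actual behavior.

# bap_preprocessing.py -- hypothetical stub for local testing only.
# Assumption: tokenize() returns a list of word and punctuation tokens;
# the real project module may tokenize differently.
import re

def tokenize(sentence):
    # Split into word tokens and standalone punctuation, e.g.
    # "Ben oraya geliyorum." -> ['Ben', 'oraya', 'geliyorum', '.']
    return re.findall(r"\w+|[^\w\s]", sentence)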