"""Gradio app that compares AraNizer tokenizers on user-supplied Arabic text.

The user picks one tokenizer from a dropdown, enters text, and sees the
tokens, the encoded ids, and the round-tripped decoded text in a table.
"""

import gradio as gr
from aranizer import (
    aranizer_bpe50k,
    aranizer_bpe64k,
    aranizer_bpe86k,
    aranizer_sp32k,
    aranizer_sp50k,
    aranizer_sp64k,
    aranizer_sp86k,
)

# Names shown in the dropdown; each matches an aranizer submodule above.
tokenizer_options = [
    "aranizer_bpe50k",
    "aranizer_bpe64k",
    "aranizer_bpe86k",
    "aranizer_sp32k",
    "aranizer_sp50k",
    "aranizer_sp64k",
    "aranizer_sp86k",
]

# Map each option name to its loader function. Derived from the option list
# (via the submodule objects imported above) so the two stay in sync.
_modules = {
    "aranizer_bpe50k": aranizer_bpe50k,
    "aranizer_bpe64k": aranizer_bpe64k,
    "aranizer_bpe86k": aranizer_bpe86k,
    "aranizer_sp32k": aranizer_sp32k,
    "aranizer_sp50k": aranizer_sp50k,
    "aranizer_sp64k": aranizer_sp64k,
    "aranizer_sp86k": aranizer_sp86k,
}
tokenizers = {name: _modules[name].get_tokenizer for name in tokenizer_options}


def compare_tokenizers(tokenizer_name, text):
    """Tokenize, encode, and round-trip *text* with the selected tokenizer.

    Args:
        tokenizer_name: One of ``tokenizer_options``.
        text: The Arabic input string to process.

    Returns:
        A single-row list of ``(name, tokens, encoded_ids, decoded_text)``
        suitable for display in a ``gr.Dataframe``.
    """
    # Tokenizers are loaded lazily, per request, via their get_tokenizer().
    tokenizer = tokenizers[tokenizer_name]()
    tokens = tokenizer.tokenize(text)
    encoded_output = tokenizer.encode(text, add_special_tokens=True)
    decoded_text = tokenizer.decode(encoded_output)
    # Stringify the list-valued cells so the Dataframe renders them reliably.
    return [(tokenizer_name, str(tokens), str(encoded_output), decoded_text)]


# Interface components: tokenizer selector plus a free-text input box.
inputs_component = [
    gr.Dropdown(choices=tokenizer_options, label="Select Tokenizer"),
    gr.Textbox(lines=2, placeholder="Enter Arabic text here...", label="Input Text"),
]

# NOTE(review): the original passed elem_height/elem_width, which are not
# gr.Dataframe parameters; `height` is the supported sizing argument.
outputs_component = gr.Dataframe(
    headers=["Tokenizer", "Tokens", "Encoded Output", "Decoded Text"],
    label="Results",
    height=500,
)

iface = gr.Interface(
    fn=compare_tokenizers,
    inputs=inputs_component,
    outputs=outputs_component,
    title="AraNizer Tokenizer Comparison",
)

if __name__ == "__main__":
    # Launch the Gradio app only when run as a script.
    iface.launch()