File size: 1,750 Bytes
2909361
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
import functools
import os

import gradio as gr
from transformers import AutoTokenizer


@functools.lru_cache(maxsize=None)
def _load_tokenizer(model_name: str = "Qwen/Qwen3-0.6B"):
    """Load and cache a Hugging Face tokenizer by model name.

    The UI handler calls this on every submit click; without caching the
    tokenizer would be re-initialized (and possibly re-downloaded) each time.
    ``lru_cache`` keys on ``model_name``, so each tokenizer loads once.

    NOTE(review): ``trust_remote_code=True`` executes code shipped with the
    model repo — acceptable for a local demo, but worth confirming for any
    deployment exposed to untrusted model names.
    """
    tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
    return tokenizer

def _update_tokenizer_name(choice):
    tokenizer_name = choice
    return tokenizer_name


def _hanle_request(text_input, tokenizer_name):
    """Tokenize *text_input* with the selected tokenizer.

    Returns the token ids as a space-separated string for display in the
    output textbox.

    NOTE(review): the misspelled name (``_hanle_request``) is kept so the
    Gradio ``submit_button.click`` wiring below continues to work.
    """
    print(f"tokenizer:{tokenizer_name}, text_input:{text_input}")
    tokenizer = _load_tokenizer(tokenizer_name)
    # Plain Python lists suffice here; return_tensors="pt" would pull in
    # torch only to convert the tensor straight back to a list.
    token_ids = tokenizer(text_input).input_ids
    print(token_ids)
    return " ".join(str(token_id) for token_id in token_ids)


if __name__ == "__main__":
    # Tokenizers offered in the dropdown.
    tokenizer_names = [
        "Qwen/Qwen3-0.6B",
        "Qwen/Qwen2.5-0.5B-Instruct",
        "Qwen/Qwen2-0.5B-Instruct",
        "openai-community/gpt2-medium",
        "deepseek-ai/DeepSeek-R1",
        "deepseek-ai/DeepSeek-V3-0324",
    ]

    with gr.Blocks() as demo:
        gr.Markdown("# Try to test multi tokenizers!")

        # Input row: tokenizer picker, text box, a hidden mirror of the
        # current dropdown choice, and the submit button.
        with gr.Row():
            dropdown = gr.Dropdown(choices=tokenizer_names, label="Choose a tokenizer")
            text_input = gr.Textbox(label="Input any texts!", value="Hello World!")
            tokenizer_name_input = gr.Textbox(
                label="Tokenizer name",
                value=tokenizer_names[0],
                visible=False,
                interactive=False,
            )
            submit_button = gr.Button(value="Submit", variant="primary")

        # Output row.
        with gr.Row():
            text_output = gr.Textbox(label="Output token ids.")

        # Keep the hidden textbox in sync with the dropdown, then feed it
        # together with the text input into the tokenize handler on submit.
        dropdown.change(
            fn=_update_tokenizer_name,
            inputs=[dropdown],
            outputs=tokenizer_name_input,
        )
        submit_button.click(
            fn=_hanle_request,
            inputs=[text_input, tokenizer_name_input],
            outputs=text_output,
        )

    demo.launch(share=True)