from transformers import AutoTokenizer
import gradio as gr

# Load the GPT-2 tokenizer
tokenizer = AutoTokenizer.from_pretrained("gpt2")

def tokenize(input_text):
    # Encode the text and report how many token IDs it produces
    tokens = tokenizer(input_text)["input_ids"]
    return f"Number of tokens: {len(tokens)}"

# gr.Textbox replaces the deprecated gr.inputs.Textbox in current Gradio releases
iface = gr.Interface(fn=tokenize, inputs=gr.Textbox(lines=7), outputs="text")
iface.launch()
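
As a quick sanity check, the tokenize function can also be called directly without launching the interface; the count shown below is what the GPT-2 tokenizer should report for this example string.

# Illustrative only: calling tokenize() outside the Gradio UI
print(tokenize("Hello world"))  # -> "Number of tokens: 2" with the GPT-2 tokenizer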