import os

import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM

model_id = "i99om/phi-2"
# Hugging Face access token, read from the environment (needed for gated or private repos).
token = os.environ.get("HF_TOKEN")

tokenizer = AutoTokenizer.from_pretrained(model_id, token=token)
model = AutoModelForCausalLM.from_pretrained(model_id, token=token)


def generate_text(prompt):
    # Tokenize the prompt and generate up to 150 new tokens, then decode the full sequence.
    inputs = tokenizer(prompt, return_tensors="pt")
    outputs = model.generate(**inputs, max_new_tokens=150)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)


# gr.Blocks() takes no `api` argument; the endpoint is exposed automatically.
with gr.Blocks() as demo:
    textbox = gr.Textbox(label="أدخل النص")  # "Enter the text"
    output = gr.Textbox(label="الناتج")  # "Output"
    btn = gr.Button("توليد")  # "Generate"
    btn.click(generate_text, inputs=textbox, outputs=output)

demo.launch()