"""Lightweight Arabic text-generation demo (no external API).

Loads the `akhooli/arabi-llama` causal LM locally and serves it through a
simple Gradio text-in / text-out interface.
"""
import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

model_name = "akhooli/arabi-llama"

# Download (or load from cache) the tokenizer and model once at startup.
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
model.eval()  # inference only — disables dropout etc.


def generate_text(prompt: str) -> str:
    """Continue *prompt* with up to 150 sampled tokens (temperature 0.7).

    Returns the full decoded sequence (prompt + continuation) with
    special tokens stripped.
    """
    inputs = tokenizer(prompt, return_tensors="pt")
    # No gradients are needed at inference time; saves memory and compute.
    with torch.no_grad():
        outputs = model.generate(
            **inputs,
            max_new_tokens=150,
            do_sample=True,
            temperature=0.7,
        )
    return tokenizer.decode(outputs[0], skip_special_tokens=True)


demo = gr.Interface(
    fn=generate_text,
    inputs=gr.Textbox(lines=2, placeholder="اكتب هنا", label="المطالبة"),
    outputs=gr.Textbox(label="النص الناتج"),
    title="اكتبلي - نموذج خفيف بدون API",
)
demo.launch()