"""Gradio app that serves the 'tacab/somali-agriculture' causal-LM for Somali
agriculture text generation.

Loading the model and launching the interface happen at import time, which is
the expected layout for a Hugging Face Spaces app script.
"""
import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline

# Model repository on the Hugging Face Hub.
MODEL_NAME = "tacab/somali-agriculture"

# Load tokenizer and model once at startup (downloads on first run).
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME)

# Text-generation pipeline shared by all requests.
generator = pipeline("text-generation", model=model, tokenizer=tokenizer)


def generate_text(prompt: str) -> str:
    """Generate a continuation of *prompt* with the Somali agriculture model.

    Args:
        prompt: User-supplied seed text.

    Returns:
        The generated text (the pipeline includes the prompt in its output).
    """
    output = generator(
        prompt,
        do_sample=True,
        # max_new_tokens bounds only the continuation; the original
        # max_length=200 also counted the prompt's tokens, so long prompts
        # would produce little or no new text.
        max_new_tokens=200,
        # Suppress the "pad_token_id not set" warning for GPT-style models.
        pad_token_id=tokenizer.eos_token_id,
    )
    return output[0]["generated_text"]


# Build and launch the Gradio interface.
gr.Interface(
    fn=generate_text,
    inputs=gr.Textbox(lines=3, label="Geli qoraalka/Prompt-ka"),
    outputs=gr.Textbox(label="Natiijada/Generated Text"),
    title="Tacab – Somali Agriculture Generator",
    description="App-kan wuxuu adeegsadaa model-ka 'tacab/somali-agriculture' si uu u abuuro qoraal cusub oo la xiriira beeraha Soomaaliyeed.",
).launch()