import warnings

import gradio as gr
import keras_nlp  # importing keras_nlp registers the Gemma layers needed to deserialize the model
from keras_nlp.models import GemmaCausalLM
from huggingface_hub import from_pretrained_keras

warnings.filterwarnings('ignore')

# Load the fine-tuned Gemma model from the Hugging Face Hub.
model = from_pretrained_keras("soufyane/gemma_data_science")


def process_text_gemma(input_text):
    # Prepend the "question:" prefix used during fine-tuning and generate a response.
    response = model.generate(f"question: {input_text}", max_length=256)
    return response


def main(input_text):
    # Gradio passes the textbox value as a plain string, so forward it unchanged
    # (indexing with [0] would send only the first character to the model).
    return process_text_gemma(input_text)


gr.Interface(
    fn=main,
    inputs=["text"],
    outputs=["text"],
    title="Gemma Data Science Model",
    description="This is a text-to-text model for data science tasks.",
    live=True,  # note: live=True re-runs generation whenever the input changes
).launch()
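
# Usage sketch (assumption, not part of the original app): the generation helper can be
# exercised without the UI by calling it directly, and launch() accepts share=True to
# expose a temporary public Gradio link, e.g.
#   print(process_text_gemma("What is gradient descent?"))
#   ...launch(share=True)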