import os
import gradio as gr
from transformers import pipeline, AutoTokenizer, AutoModelForCausalLM
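# Dependencies (assumption: pinned in the Space's requirements.txt):
#   gradio, transformers, torch
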
# Model configuration
MODEL_ID = "PuruAI/Medini_Intelligence"
FALLBACK_MODEL = "gpt2"
HF_TOKEN = os.getenv("HF_TOKEN") # must be set in your env/secrets
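# Note: a token is only needed when the model repo is private or gated; public
# models load anonymously. With HF_TOKEN unset, from_pretrained receives
# token=None, so a private repo will fail to load and trigger the GPT-2
# fallback below.
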
def load_model(model_id):
    """Load Medini if available, otherwise fall back to GPT-2."""
    try:
        print(f"🔹 Loading model: {model_id}")
        tokenizer = AutoTokenizer.from_pretrained(model_id, token=HF_TOKEN)
        model = AutoModelForCausalLM.from_pretrained(model_id, token=HF_TOKEN)
        return pipeline("text-generation", model=model, tokenizer=tokenizer)
    except Exception as e:
        print(f"❌ Failed to load {model_id}: {e}")
        print("⏩ Falling back to GPT-2 (no token needed)")
        return pipeline("text-generation", model=FALLBACK_MODEL)
# Initialize pipeline
generator = load_model(MODEL_ID)
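# Sketch (assumption: torch with CUDA is available): to run on a GPU, pass
# device=0 to the pipeline(...) calls in load_model, e.g.
#   pipeline("text-generation", model=model, tokenizer=tokenizer, device=0)
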
def generate_text(prompt):
    # max_new_tokens counts only generated tokens; the older max_length also
    # counted the prompt, which could silently truncate output on long inputs.
    outputs = generator(prompt, max_new_tokens=200, num_return_sequences=1)
    return outputs[0]["generated_text"]
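# Optional sampling knobs, shown as a sketch using standard transformers
# generation kwargs (do_sample, temperature, top_p); the call above typically
# decodes greedily:
#   generator(prompt, max_new_tokens=200, do_sample=True,
#             temperature=0.7, top_p=0.9, num_return_sequences=1)
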
# Gradio UI
iface = gr.Interface(
    fn=generate_text,
    inputs="text",
    outputs="text",
    title="Medini Intelligence",
    description="Custom AI Agent with fallback to GPT-2",
)
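# Sketch (assumption: running as a Hugging Face Space, where launch() binds to
# the host/port automatically). For local testing, share=True creates a
# temporary public URL:
#   iface.launch(share=True)
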
if __name__ == "__main__":
    iface.launch()