import gradio as gr
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

# Load the concept-tagging model once at import time (downloads on first run).
# NOTE(review): the checkpoint name ("eleuther_1.3B") suggests a decoder-only
# GPT-Neo-family base; confirm AutoModelForSeq2SeqLM is the correct head for
# this checkpoint — a causal checkpoint would need AutoModelForCausalLM.
model_name = "Aiyan99/Theus_eleuther_1.3B_concepttagging"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)


def summarize_text(input_text):
    """Run the concept-tagging model on *input_text* and return the decoded output.

    Args:
        input_text: Raw text entered in the Gradio textbox.

    Returns:
        The model's generated text with special tokens stripped, or "" for
        empty/whitespace-only input.
    """
    # Skip a pointless beam search (min_length=40 would hallucinate output)
    # when the user submitted nothing.
    if not input_text or not input_text.strip():
        return ""

    # Use the tokenizer's __call__ so we get an attention_mask alongside the
    # input_ids; generate() without a mask is unreliable when pad/eos ids
    # coincide (Transformers emits a warning for exactly this case).
    # Truncate so over-long inputs don't exceed the model's context window.
    encoded = tokenizer(
        input_text,
        return_tensors="pt",
        max_length=1024,
        truncation=True,
    )
    summary_ids = model.generate(
        encoded["input_ids"],
        attention_mask=encoded["attention_mask"],
        max_length=150,
        min_length=40,
        length_penalty=2.0,
        num_beams=4,
        early_stopping=True,
    )
    return tokenizer.decode(summary_ids[0], skip_special_tokens=True)


# Minimal text-in / text-out UI around the model.
iface = gr.Interface(
    fn=summarize_text,
    inputs="text",
    outputs="text",
    title="Concept Tagger",
    description="Concept tag your text using Aiyan99's Theus model (1.3B Concept Tagging).",
)

if __name__ == "__main__":
    iface.launch()