import torch
import gradio as gr
from accelerate import Accelerator
from transformers import pipeline

# Initialize the accelerator
accelerator = Accelerator()

# Print a description of the current configuration
print("Accelerator State:", accelerator.state)

# Define the path to your custom model
model_path = (
    "../Models/models--sshleifer--distilbart-cnn-12-6/snapshots"
    "/a4f8f3ea906ed274767e9906dbaede7531d660ff"
)

# Initialize the text summarization pipeline
try:
    text_summary = pipeline(
        "summarization",
        model=model_path,
        torch_dtype=torch.bfloat16,  # Use bfloat16 for better performance on supported hardware
        device=0 if torch.cuda.is_available() else -1  # Use GPU if available
    )
except Exception as e:
    print(f"Error initializing the summarization pipeline: {e}")
    raise
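
# Optional sanity check (a sketch, not part of the original app): the pipeline
# returns a list of dicts, each carrying a "summary_text" key. The sample text
# is illustrative only; uncomment to verify the model loads and runs.
# sample = text_summary("Gradio wraps a Python function in a simple web UI, "
#                       "which makes it easy to demo machine learning models.")
# print(sample[0]["summary_text"])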

# Define the Gradio interface function
def summary(input_text):
    try:
        output = text_summary(input_text)
        return output[0]['summary_text']
    except Exception as e:
        return f"An error occurred while summarizing: {e}"

# Close any existing Gradio interfaces
gr.close_all()

# Define the Gradio interface
demo = gr.Interface(
    fn=summary,
    inputs=[gr.Textbox(label="Input text to summarize", lines=6)],
    outputs=[gr.Textbox(label="Summarized text", lines=4)],
    title="GenAIProject01: Text Summarizer",
    description="This application summarizes input text using a pre-trained model."
)

# Launch the Gradio app
demo.launch()
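
# To expose a temporary public URL in addition to the local server (an optional
# Gradio feature, not used above), launch with sharing enabled instead:
# demo.launch(share=True)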