# Article summarizer — Gradio app (Hugging Face Space).
# (Removed non-Python residue from the HF file-viewer scrape: blame hashes
# and line-number gutter that would be syntax errors.)
import torch
import gradio as gr
from fastai.text.all import *
from transformers import BartTokenizer
from datasets import load_dataset, load_metric
# Pre-trained BART checkpoint used for summarization.
# "facebook/bart-base" is a smaller/faster alternative.
pretrained_model_name = "facebook/bart-large-cnn"
hf_tokenizer = BartTokenizer.from_pretrained(pretrained_model_name)

# Load the SQuAD dataset (downloaded and cached by the datasets library).
# NOTE(review): neither `squad` nor `squad_metric` is used by the rest of
# this visible script — presumably leftovers from training; confirm before
# removing.
squad = load_dataset("squad")

# Load the SQuAD metric. Fixed: the previous call passed
# data_dir="path/to/squad/dataset" — a placeholder path that `load_metric`
# would forward to the metric and fail on; the "squad" metric fetches its
# own resources and needs no data_dir.
# NOTE: `load_metric` is deprecated (removed in datasets>=3.0); migrate to
# `evaluate.load("squad")` when the `evaluate` package is available.
squad_metric = load_metric("squad", trust_remote_code=True)
# Cache for the loaded learner so the expensive deserialization of
# 'article_highlights.pkl' happens once per process, not on every request.
_learn = None


def summarize(article):
    """Generate a summary for *article* with the exported BART learner.

    Parameters
    ----------
    article : str
        The article text entered in the Gradio textbox.

    Returns
    -------
    str
        The generated summary (first element of ``blurr_generate``'s output).
    """
    global _learn
    if _learn is None:
        # load_learner comes from `fastai.text.all`; the pickle is assumed
        # to carry the blurr callbacks that provide `blurr_generate` —
        # TODO confirm the export was made with blurr installed.
        _learn = load_learner('article_highlights.pkl')
    summary = _learn.blurr_generate(article)[0]
    return summary
# Wire the summarizer into a simple text-in / text-out Gradio UI.
interface_kwargs = dict(
    fn=summarize,
    inputs="text",
    outputs="text",
    title="Article Summarizer",
    description="Enter an article and get a summary.",
    examples=[["This is an example article..."]],
)
iface = gr.Interface(**interface_kwargs)

# Start the web server for the app.
iface.launch()