# NOTE: removed non-Python residue (file-viewer metadata, commit hashes, and
# bare line numbers from a copy/paste) that preceded the script and broke parsing.
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
import gradio as gr

# Hugging Face Hub model id for the summarization checkpoint.
# NOTE: "sumarization" (single "m") is part of the published repo name — do not "fix" it.
checkpoint = "Mr-Vicky-01/conversational_sumarization"
# Loaded at import time: downloads (or reads from the local HF cache) the
# tokenizer and the seq2seq model weights.
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForSeq2SeqLM.from_pretrained(checkpoint)

def generate_summary(text):
    """Summarize *text* with the loaded seq2seq model.

    Args:
        text: Input passage to condense.

    Returns:
        The decoded summary string, with special tokens stripped.
    """
    # Tokenize; truncate to the model's 1024-token input window.
    inputs = tokenizer([text], max_length=1024, return_tensors='pt', truncation=True)
    # Fix: pass attention_mask alongside input_ids. The original dropped it,
    # which makes the model attend to padding tokens and triggers a
    # transformers warning ("The attention mask ... was not set").
    summary_ids = model.generate(
        inputs['input_ids'],
        attention_mask=inputs['attention_mask'],
        max_new_tokens=100,
        do_sample=False,  # greedy decoding -> deterministic summaries
    )
    return tokenizer.decode(summary_ids[0], skip_special_tokens=True)

# One-click example rows shown in the Gradio UI (each row is a list with one
# value per input component — here a single text input).
examples = [
    ["The rise of artificial intelligence (AI) has revolutionized various industries, including healthcare, finance, and transportation. AI technologies such as machine learning and natural language processing have enabled computers to perform tasks that were once thought to be exclusive to humans. In healthcare, AI is being used to diagnose diseases, personalize treatment plans, and predict patient outcomes. In finance, AI algorithms analyze market trends, manage portfolios, and detect fraudulent activities. Additionally, AI-powered autonomous vehicles are poised to transform transportation by enhancing safety and efficiency on the roads. Overall, the widespread adoption of AI is reshaping the way we live and work, with profound implications for society in the coming years."],
]

# Wire the summarizer into a simple text-in / text-out web UI.
demo = gr.Interface(fn=generate_summary, inputs='text',outputs='text',title='Text Summarization', examples=examples)
# share=True requests a public *.gradio.live tunnel; debug=True keeps the call
# blocking and streams server errors to the console. Runs at import time
# (no __main__ guard), so importing this module launches the app.
demo.launch(debug=True,share=True)