File size: 3,119 Bytes
827697f
 
 
 
 
 
6536c32
827697f
6536c32
827697f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
6536c32
743bee3
827697f
 
743bee3
6536c32
 
 
827697f
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
def data_summarizer(input_eng):
    """Summarize text written in any language, in that language.

    Pipeline: detect the source language, translate the input to English,
    generate an abstractive summary with Pegasus (xsum), extract keywords
    from the English summary, then translate the summary and keywords back
    to the detected source language.

    Parameters
    ----------
    input_eng : str
        Input text in any language the googletrans service can detect.

    Returns
    -------
    tuple[str, str, str]
        (sentiment label of the summary, comma-separated keywords in the
        source language, summary text in the source language).
    """
    import warnings
    warnings.filterwarnings("ignore")

    # Heavy third-party imports stay function-local so importing the module
    # does not trigger model downloads.
    from googletrans import Translator
    from transformers import pipeline, PegasusForConditionalGeneration, AutoTokenizer
    from gensim.summarization import keywords

    sentiment = pipeline("sentiment-analysis")
    # Load the pegasus-xsum tokenizer/model exactly once (the original
    # loaded the tokenizer twice).
    tokenizer = AutoTokenizer.from_pretrained("google/pegasus-xsum")
    model = PegasusForConditionalGeneration.from_pretrained("google/pegasus-xsum")

    # One Translator instance is sufficient; detect the source language once
    # so both back-translations reuse the same result.
    translator = Translator()
    source_lang = translator.detect(input_eng).lang

    # Pegasus is English-only, so translate the input to English first.
    translation = translator.translate(input_eng, dest="en")
    tokens = tokenizer(translation.text, truncation=True, padding="longest",
                       return_tensors="pt")

    # Generate the summary and strip Pegasus' special tokens from the
    # decoded string.
    summary_ids = model.generate(**tokens)
    text = tokenizer.decode(summary_ids[0]).replace("<pad> ", "").replace("</s>", "")

    # Top-5 keywords from the English summary, then translate keywords and
    # summary back to the detected source language.
    key = keywords(text, words=5, lemmatize=False)
    keys = translator.translate(key, dest=source_lang)
    out = translator.translate(text, dest=source_lang)

    return (sentiment(out.text.strip())[0]['label'],
            keys.text.replace("\n", ","),
            out.text.strip())




# CLI alternative to the web UI:
# input_eng = input()
# data_summarizer(input_eng)
import gradio as gr

# Web UI: one large textbox in; three text outputs matching the
# (sentiment, keywords, summary) tuple returned by data_summarizer.
interface = gr.Interface(
    fn=data_summarizer,
    inputs=gr.inputs.Textbox(lines=20, placeholder="Paste your input text..."),
    outputs=["text", "text", "text"],
)
interface.launch(inline=False)