import gradio as gr
from transformers import pipeline, AutoTokenizer, AutoModelForSeq2SeqLM, AutoModelForCausalLM

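# Translation: Helsinki-NLP MarianMT checkpoints, one per language pair.
# The model and tokenizer are loaded inside the function on every call, which keeps
# startup light but makes each request slower; caching them would avoid the reload.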
def translate_text(text, language):
    if language == 'English to Hindi':
        tokenizer = AutoTokenizer.from_pretrained("Helsinki-NLP/opus-mt-en-hi")
        model = AutoModelForSeq2SeqLM.from_pretrained("Helsinki-NLP/opus-mt-en-hi")
    elif language == 'English to French':
        tokenizer = AutoTokenizer.from_pretrained("Helsinki-NLP/opus-mt-en-fr")
        model = AutoModelForSeq2SeqLM.from_pretrained("Helsinki-NLP/opus-mt-en-fr")
    elif language == 'English to Spanish':
        tokenizer = AutoTokenizer.from_pretrained("Helsinki-NLP/opus-mt-en-es")
        model = AutoModelForSeq2SeqLM.from_pretrained("Helsinki-NLP/opus-mt-en-es")
    else:
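        # Unsupported or empty selection: return the input text unchanged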
        return text

    inputs = tokenizer(text, return_tensors="pt", padding=True, truncation=True)
    outputs = model.generate(**inputs)
    translation = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return translation

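# Summarization: BART fine-tuned on CNN/DailyMail, loaded once at startup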
summarizer = pipeline("summarization", model="facebook/bart-large-cnn")
def summarize_article(article):
    summary = summarizer(article, max_length=30, min_length=10, do_sample=False)
    return summary[0]['summary_text']

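# Sentiment analysis: multilingual DistilBERT student model; return_all_scores=True
# returns a score for every label (newer transformers versions spell this top_k=None)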
distilled_student_sentiment_classifier = pipeline(
    model="lxyuan/distilbert-base-multilingual-cased-sentiments-student",
    return_all_scores=True
)

def sentiment_analysis(text):
    result = distilled_student_sentiment_classifier(text)
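    # result[0] is a list of {label, score} dicts, one per class; keep the top-scoring one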
    score = max(result[0], key=lambda x: x['score'])
    label = score['label']
    mood = "Moderate"

    if label == "positive":
        if score['score'] > 0.75:
            mood = "Very Happy"
        else:
            mood = "Happy"
    elif label == "negative":
        if score['score'] > 0.75:
            mood = "Very Sad"
        else:
            mood = "Sad"
    else:
        mood = "Neutral"

    return mood

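# Text generation: GPT-2 (base), loaded once at startup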
generator = pipeline('text-generation', model='gpt2')
def generate_text(prompt):
    generated_texts = generator(prompt, max_length=150, num_return_sequences=1)
    return generated_texts[0]['generated_text']

# Code Generation: Salesforce CodeGen 350M (Python-focused "mono" checkpoint), loaded once at startup
model = AutoModelForCausalLM.from_pretrained("Salesforce/codegen-350M-mono")
tokenizer = AutoTokenizer.from_pretrained("Salesforce/codegen-350M-mono")

def generate_code(prompt):
    inputs = tokenizer(prompt, return_tensors="pt")
    outputs = model.generate(
        **inputs,
        max_length=200,
        num_return_sequences=1,
        do_sample=True,  # sampling must be enabled for temperature/top_k/top_p to take effect
        temperature=0.7,
        top_k=50,
        top_p=0.95
    )
    generated_code = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return generated_code

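# Gradio UI: one tab per task, each wired to the corresponding function above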
with gr.Blocks() as demo:
    with gr.Tab("Translation"):
        with gr.Row():
            language = gr.Dropdown(label="Select Language", choices=["English to Hindi", "English to French", "English to Spanish"])
            text_input = gr.Textbox(label="Input Text for Translation")
        translate_btn = gr.Button("Translate")
        translation_output = gr.Textbox(label="Translation Output")
        translate_btn.click(fn=translate_text, inputs=[text_input, language], outputs=translation_output)

    with gr.Tab("Summarization"):
        with gr.Row():
            article_input = gr.Textbox(label="Input Article for Summarization")
        summarize_btn = gr.Button("Summarize")
        summary_output = gr.Textbox(label="Summary Output")
        summarize_btn.click(fn=summarize_article, inputs=article_input, outputs=summary_output)

    with gr.Tab("Sentiment Analysis"):
        with gr.Row():
            sentiment_input = gr.Textbox(label="Input Text for Sentiment Analysis")
        sentiment_btn = gr.Button("Analyze Sentiment")
        sentiment_output = gr.Textbox(label="Sentiment Output")
        sentiment_btn.click(fn=sentiment_analysis, inputs=sentiment_input, outputs=sentiment_output)

    with gr.Tab("Text Generation"):
        with gr.Row():
            prompt_input = gr.Textbox(label="Input Prompt for Text Generation")
        generate_btn = gr.Button("Generate Text")
        generation_output = gr.Textbox(label="Generated Text")
        generate_btn.click(fn=generate_text, inputs=prompt_input, outputs=generation_output)

    with gr.Tab("Code Generation"):
        with gr.Row():
            code_prompt_input = gr.Textbox(label="Input Prompt for Code Generation")
        generate_code_btn = gr.Button("Generate Code")
        code_generation_output = gr.Textbox(label="Generated Code")
        generate_code_btn.click(fn=generate_code, inputs=code_prompt_input, outputs=code_generation_output)

demo.launch()