# -*- coding: utf-8 -*-
"""chat.ipynb
Automatically generated by Colab.
Original file is located at
https://colab.research.google.com/drive/1FxpG0gxd0Oigj5CAekXbdwgu7NdtI5sF
"""
# !pip install -r requirements.txt
from transformers import pipeline  # Conversation is never used below, so it is not imported
import gradio as gr
# toy example 1: sentiment analysis with the default model
pipeline(task="sentiment-analysis")("Love this!")
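# a single string in -> a list with one {'label': ..., 'score': ...} dict out,
# e.g. something like [{'label': 'POSITIVE', 'score': 0.99}] (score shown is illustrative)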
# toy example 2: the same task with an explicitly specified model
pipeline(task="sentiment-analysis", model="distilbert-base-uncased-finetuned-sst-2-english")("Love this!")
# define a reusable sentiment classifier
classifier = pipeline(task="sentiment-analysis", model="distilbert-base-uncased-finetuned-sst-2-english")
classifier("Hate this.")
# we can also pass a list of texts to the classifier
text_list = ["This is great",
             "Thanks for nothing",
             "You've got to work on your face",
             "You're beautiful, never change!"]
classifier(text_list)
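# with a list input, the classifier returns one {'label', 'score'} dict per text,
# in the same order as the inputs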
# with top_k=None, the classifier returns scores for every label, not just the top one
classifier = pipeline(task="text-classification", model="SamLowe/roberta-base-go_emotions", top_k=None)
classifier(text_list[0])
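# with top_k=None the output is nested: for each input, a list of
# {'label', 'score'} dicts covering every emotion label, sorted by score;
# the classifier(message)[0][:3] indexing further below relies on this shape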
summarizer = pipeline("summarization", model="facebook/bart-large-cnn")
text = """
Hugging Face is an AI company that has become a major hub for open-source machine learning.
Their platform has 3 major elements which allow users to access and share machine learning resources.
First, is their rapidly growing repository of pre-trained open-source machine learning models for things such as natural language processing (NLP), computer vision, and more.
Second, is their library of datasets for training machine learning models for almost any task.
Third, and finally, is Spaces which is a collection of open-source ML apps.
The power of these resources is that they are community generated, which leverages all the benefits of open source i.e. cost-free, wide diversity of tools, high quality resources, and rapid pace of innovation.
While these make building powerful ML projects more accessible than before, there is another key element of the Hugging Face ecosystem—their Transformers library.
"""
summarized_text = summarizer(text, min_length=5, max_length=140)[0]['summary_text']
summarized_text
classifier(summarized_text)
chatbot = pipeline(model="facebook/blenderbot-400M-distill")
conversation = chatbot("Hi I'm Shaw, how are you?")
conversation
conversation = chatbot("Where do you work?")
conversation
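# each call above is an independent turn: the pipeline returns a list of dicts
# with a 'generated_text' key (used in vanilla_chatbot below), and no
# conversation state is carried over between calls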
def top3_text_classes(message, history):
    # keep the three highest-scoring labels and format them one per line
    return str(classifier(message)[0][:3]).replace('}, {', '\n').replace('[{', '').replace('}]', '')
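# gr.ChatInterface calls the function with the latest user message plus the chat
# history and displays whatever string it returns; history is unused here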
demo_sentiment = gr.ChatInterface(top3_text_classes, title="Text Sentiment Chatbot", description="Enter your text, and the chatbot will classify the sentiment.")
demo_sentiment.launch()
def summarizer_bot(message, history):
    # summarize the user's message and return only the summary text
    return summarizer(message, min_length=5, max_length=140)[0]['summary_text']
demo_summarizer = gr.ChatInterface(summarizer_bot, title="Summarizer Chatbot", description="Enter your text, and the chatbot will return the summarized version.")
demo_summarizer.launch()
message_list = []
response_list = []
def vanilla_chatbot(message, history):
    # pass the message to the BlenderBot pipeline and return its reply
    conversation = chatbot(message)
    return conversation[0]['generated_text']
demo_chatbot = gr.ChatInterface(vanilla_chatbot, title="Vanilla Chatbot", description="Enter text to start chatting.")
demo_chatbot.launch()
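# Optional alternative (a minimal sketch, not part of the original app): instead of
# calling launch() separately on each demo above, the three ChatInterfaces could be
# served from a single app with gr.TabbedInterface; the tab names below are
# illustrative placeholders.
# demo_all = gr.TabbedInterface(
#     [demo_sentiment, demo_summarizer, demo_chatbot],
#     ["Sentiment", "Summarizer", "Chatbot"],
# )
# demo_all.launch()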