# NOTE(review): removed non-Python residue from a web file viewer (a file-size
# line, git-blame commit hashes, and a line-number ruler). It was not source
# code and would have been a syntax error at runtime.
# #Import the required Libraries
# import gradio as gr
# import pickle
# import pandas as pd
# import numpy as np
# import transformers
# # Load from the Hugging Face model hub
# model = transformers.AutoModel.from_pretrained('AmpomahChief/sentiment_analysis_on_covid_tweets')
# # Define a function that takes in input and passes it through the model
# def predict(inputs):
# input_ids = transformers.BertTokenizer.from_pretrained('AmpomahChief/sentiment_analysis_on_covid_tweets').encode(inputs, return_tensors='pt')
# output = model(input_ids)[0]
# return output
# # Create a Gradio interface for the model
# interface = gr.Interface(fn=predict, inputs=gr.Textbox(prompt="Input text:"), outputs=gr.Textbox(prompt="Model output:"))
# # Launch the interface
# interface.launch()
# COVID-19 tweet sentiment analysis demo, served through the Hugging Face
# inference API: the model stays hosted remotely, so nothing is downloaded
# or executed locally.
import gradio as gr

# NOTE(review): `gr.Interface.load` and the boolean form of `allow_flagging`
# are deprecated in newer Gradio releases (`gr.load` / allow_flagging="never")
# — confirm the pinned Gradio version before modernising either one.
App = gr.Interface.load(
    "huggingface/AmpomahChief/sentiment_analysis_on_covid_tweets",
    title="COVID 19 tweets sentiment analysis",
    description="This is a sentiment analysis on COVID 19 tweets using pretrained model on hugging face",
    allow_flagging=False,
    examples=[["Input your text here"]],
)

# Start the Gradio web server for the app.
App.launch()
# import gradio as gr
# from transformers import pipeline
# import transformers
# Model = transformers.AutoModel.from_pretrained('AmpomahChief/sentiment_analysis_on_covid_tweets')
# pipeline = pipeline(task="image-classification", model=Model)
# def predict(inputs):
# input_ids = transformers.BertTokenizer.from_pretrained('AmpomahChief/sentiment_analysis_on_covid_tweets').encode(inputs, return_tensors='pt')
# output = model(input_ids)[0]
# return output
# # Create a Gradio interface for the model
# interface = gr.Interface(fn=predict, inputs=gr.Textbox(prompt="Input text:"), outputs=gr.Textbox(prompt="Model output:"))
# # Launch the interface
# interface.launch()