# Import the required libraries
import gradio as gr
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Load the fine-tuned sentiment model and its tokenizer once from the Hugging Face model hub.
# AutoModelForSequenceClassification assumes the checkpoint carries a classification head,
# which the Inference API usage in the commented-out block below implies.
MODEL_NAME = 'AmpomahChief/sentiment_analysis_on_covid_tweets'
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForSequenceClassification.from_pretrained(MODEL_NAME)
# Define a function that tokenizes the input text and passes it through the model
def predict(inputs):
    encoded = tokenizer(inputs, return_tensors='pt', truncation=True)
    with torch.no_grad():
        logits = model(**encoded).logits
    # Map the highest-scoring class index to its label name
    predicted_id = logits.argmax(dim=-1).item()
    return model.config.id2label[predicted_id]
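
# A minimal local sanity check (illustrative only; the example tweet below is a
# hypothetical input, not part of the original app):
# print(predict("I finally got my vaccine appointment today!"))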
# Create a Gradio interface for the model
interface = gr.Interface(fn=predict,
                         inputs=gr.Textbox(label="Input text"),
                         outputs=gr.Textbox(label="Model output"))
# Launch the interface
interface.launch()
# import gradio as gr
# # Creating a gradio app using the Inference API
# App = gr.Interface.load("huggingface/AmpomahChief/sentiment_analysis_on_covid_tweets",
#                         title="COVID-19 tweets sentiment analysis",
#                         description="This is a sentiment analysis app for COVID-19 tweets using a pretrained model on Hugging Face",
#                         allow_flagging=False, examples=[["Input your text here"]]
#                         )
# App.launch()
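
# Note: gr.Interface.load builds the UI directly from the hosted Inference API, so no local
# model download is needed; newer Gradio releases expose the same functionality as gr.load(...).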