import gradio as gr
import requests.exceptions
from transformers import pipeline
from huggingface_hub import hf_hub_download
from huggingface_hub.repocard import metadata_load
app = gr.Blocks()
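# Hub ids of the five models compared in this demo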
model_id_1 = "nlptown/bert-base-multilingual-uncased-sentiment"
model_id_2 = "microsoft/deberta-base"
model_id_3 = "distilbert-base-uncased-finetuned-sst-2-english"
model_id_4 = "lordtt13/emo-mobilebert"
model_id_5 = "juliensimon/reviews-sentiment-analysis"
def load_agent(model_id):
    """
    Load a model's metadata and its prediction function from the Hub.
    """
    # Load the model card metadata
    metadata = get_metadata(model_id)
    # Build the prediction function for this model
    predictions = get_prediction(model_id)
    return model_id, predictions
def get_metadata(model_id):
    """
    Get the metadata of the model repo
    :param model_id: the Hub model id
    :return: metadata dict, or None if the model card cannot be downloaded
    """
    try:
        readme_path = hf_hub_download(model_id, filename="README.md")
        metadata = metadata_load(readme_path)
        print(metadata)
        return metadata
    except requests.exceptions.HTTPError:
        return None
def get_prediction(model_id):
    # Build the text-classification pipeline once; the returned closure reuses it for every call
    classifier = pipeline("text-classification", model=model_id, return_all_scores=True)
    def predict(review):
        # Return scores for every label so the output box shows the full distribution
        prediction = classifier(review)
        print(prediction)
        return prediction
    return predict
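# Build the Gradio UI: a shared input textbox plus one "Predict" button and output box per model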
with app:
gr.Markdown(
"""
# Compare Sentiment Analysis Models
Type text to predict sentiment.
""")
with gr.Row():
        inp_1 = gr.Textbox(label="Type text here.", placeholder="The customer service was satisfactory.")
gr.Markdown(
"""
**Model Predictions**
""")
gr.Markdown(
"""
Model 1 = nlptown/bert-base-multilingual-uncased-sentiment
""")
with gr.Row():
btn1 = gr.Button("Predict - Model 1")
with gr.Row():
out_1 = gr.Textbox(label="Predictions for Model 1")
btn1.click(fn=get_prediction(model_id_1), inputs=inp_1, outputs=out_1)
gr.Markdown(
"""
Model 2 = microsoft/deberta-base
""")
with gr.Row():
btn2 = gr.Button("Predict - Model 2")
with gr.Row():
out_2 = gr.Textbox(label="Predictions for Model 2")
btn2.click(fn=get_prediction(model_id_2), inputs=inp_1, outputs=out_2)
gr.Markdown(
"""
    Model 3 = distilbert-base-uncased-finetuned-sst-2-english
""")
with gr.Row():
btn3 = gr.Button("Predict - Model 3")
with gr.Row():
out_3 = gr.Textbox(label="Predictions for Model 3")
btn3.click(fn=get_prediction(model_id_3), inputs=inp_1, outputs=out_3)
gr.Markdown(
"""
Model 4 = lordtt13/emo-mobilebert
""")
with gr.Row():
btn4 = gr.Button("Predict - Model 4")
with gr.Row():
out_4 = gr.Textbox(label="Predictions for Model 4")
btn4.click(fn=get_prediction(model_id_4), inputs=inp_1, outputs=out_4)
gr.Markdown(
"""
Model 5 = juliensimon/reviews-sentiment-analysis
""")
with gr.Row():
btn5 = gr.Button("Predict - Model 5")
with gr.Row():
out_5 = gr.Textbox(label="Predictions for Model 5")
btn5.click(fn=get_prediction(model_id_5), inputs=inp_1, outputs=out_5)
app.launch()