# app.py — Gradio Space: XLM-RoBERTa (LoRA) consumer-complaint classifier.
# NOTE(review): the "Spaces / Runtime error" banner text captured by the page
# scrape was replaced with this comment so the file parses as Python.
# import gradio as gr
# from peft import PeftModel, PeftConfig
# from transformers import AutoModelForSequenceClassification, AutoTokenizer
# import torch
# import json
# # Load id2label
# with open('id2label.json', 'r') as f:
#     id2label = json.load(f)
# # Load label2id
# with open('label2id.json', 'r') as f:
#     label2id = json.load(f)
# print("ID2LABEL", id2label)
# print("LABEL2ID", label2id)
# # Download model from end-point
# MODEL = "xlm-roberta-large"
# peft_model_id = "JAdeojo/xlm-roberta-large-lora-consumer-complaints-cfpb"
# config = PeftConfig.from_pretrained(peft_model_id)
# inference_model = AutoModelForSequenceClassification.from_pretrained(
#     MODEL,
#     num_labels=len(id2label),
#     id2label=id2label, label2id=label2id,
#     # ignore_mismatched_sizes=True
# )
# tokenizer = AutoTokenizer.from_pretrained(MODEL)
# model = PeftModel.from_pretrained(inference_model, "JAdeojo/xlm-roberta-large-lora-consumer-complaints-cfpb")
# # run inference
# def classify_complaint(Complaints, id2label):
#     inputs = tokenizer(Complaints, return_tensors="pt")
#     with torch.no_grad():
#         logits = model(**inputs).logits
#     # tokens = inputs.tokens()
#     predictions = torch.argmax(logits, dim=-1)
#     predicted_label = predictions.item()
#     predicted_class = id2label[predicted_label]
#     return predicted_class
# demo = gr.Interface(fn=classify_complaint, inputs="text", outputs="text")
# demo.launch()
import gradio as gr
from peft import PeftModel, PeftConfig
from transformers import AutoModelForSequenceClassification, AutoTokenizer
import torch
import json

# Label mappings saved at training time. JSON object keys are always strings,
# so id2label maps str(label_id) -> human-readable category name.
with open('id2label.json', 'r') as f:
    id2label = json.load(f)
with open('label2id.json', 'r') as f:
    label2id = json.load(f)
print("ID2LABEL", id2label)
print("LABEL2ID", label2id)

# Base checkpoint plus the LoRA adapter fine-tuned on CFPB consumer complaints.
MODEL = "xlm-roberta-large"
peft_model_id = "JAdeojo/xlm-roberta-large-lora-consumer-complaints-cfpb_49k"
config = PeftConfig.from_pretrained(peft_model_id)
inference_model = AutoModelForSequenceClassification.from_pretrained(
    MODEL,
    num_labels=len(id2label),
    id2label=id2label,
    label2id=label2id,
    # ignore_mismatched_sizes=True
)
tokenizer = AutoTokenizer.from_pretrained(MODEL)
# Wrap the base model with the LoRA adapter weights for inference.
model = PeftModel.from_pretrained(inference_model, peft_model_id)
# run inference | |
def classify_complaint(Complaints): | |
inputs = tokenizer(Complaints, return_tensors="pt") | |
with torch.no_grad(): | |
logits = model(**inputs).logits | |
predictions = torch.argmax(logits, dim=-1) | |
predicted_label = predictions.item() | |
predicted_class = id2label[str(predicted_label)] # Make sure to access id2label with a string key | |
return predicted_class | |
# Gradio UI. The `gr.inputs` / `gr.outputs` namespaces were deprecated in
# Gradio 3.x and removed in 4.x, where they raise AttributeError — the likely
# cause of this Space's "Runtime error". Use the top-level components instead.
input_component = gr.Textbox(label="Enter the financial complaint:")
output_component = gr.Textbox(label="Complaint Category")
demo = gr.Interface(fn=classify_complaint, inputs=input_component, outputs=output_component)
demo.launch()