import gradio as gr
import torch
from torch.nn import functional as F
from torchvision import transforms
from huggingface_hub import hf_hub_download
# Download the fine-tuned weights from the Hub and build the model via torch.hub.
REPO_ID = "Kr1n3/Fashion-Items-Classification"
FILENAME = "best.pt"
yolov5_weights = hf_hub_download(repo_id=REPO_ID, filename=FILENAME)
model = torch.hub.load('ultralytics/yolov5', 'custom', path=yolov5_weights, force_reload=True)
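# Note: torch.hub.load('ultralytics/yolov5', 'custom', ...) clones the YOLOv5
# repo and rebuilds the network from the checkpoint. force_reload=True discards
# the cached repo on every startup, which slows cold starts but avoids
# stale-cache problems on Spaces.
model.eval()  # precaution; hub models are typically already in eval mode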
# Standard ImageNet preprocessing: resize, convert to a tensor, and normalize
# with the ImageNet channel means and standard deviations.
data_transform1 = transforms.Compose([
    transforms.Resize((224, 224)),
    transforms.ToTensor(),
    transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)),
])
title = "Fashion Items Classification"
description = """This model is a small demonstration, trained on women's fashion items divided into 5 classes: Bag, Dress, Pants, Shoes and Skirt.
"""
#examples=[['https://github.com/Kr1n3/MPC_2022/blob/main/dataset/bag_14.JPG?raw=true'],['https://github.com/Kr1n3/MPC_2022/blob/main/dataset/dress_45.JPG?raw=true'],['https://github.com/Kr1n3/MPC_2022/blob/main/dataset/pants_30.jpeg?raw=true']]
classes = ['Bag', 'Dress', 'Pants', 'Shoes', 'Skirt']
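# NOTE: the probability-to-label mapping in predict() assumes this list matches
# the class order used at training time (here, alphabetical).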
def predict(img):
    # Preprocess the PIL image and add a batch dimension.
    inp = data_transform1(img).unsqueeze(0)
    with torch.no_grad():
        outputs = model(inp)
    # Turn the logits for the single image into class probabilities.
    pred = F.softmax(outputs[0], dim=0).cpu().numpy()
    return {classes[i]: float(pred[i]) for i in range(len(classes))}
# gr.inputs was deprecated and later removed from Gradio; use gr.Image directly.
gr.Interface(fn=predict,
             inputs=gr.Image(type='pil'),
             outputs='label',
             title=title,
             description=description).launch(debug=True)
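# A minimal local smoke test (a sketch, not part of the Space): run one of the
# commented example images through predict() before launching. Assumes the
# requests and Pillow packages are installed.
#
# import io
# import requests
# from PIL import Image
#
# url = 'https://github.com/Kr1n3/MPC_2022/blob/main/dataset/bag_14.JPG?raw=true'
# img = Image.open(io.BytesIO(requests.get(url).content)).convert('RGB')
# print(predict(img))  # dict mapping the 5 class names to probabilities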