import torch
import gradio as gr
from torch.nn import functional as F
from PIL import Image
from torchvision import transforms
# Preprocessing for inference: ResNet-18 expects 224x224 inputs, normalized with
# the ImageNet mean/std that all torchvision ResNet models were trained with.
# Random flip/rotation are training-time augmentations, so they are omitted here.
transformer = transforms.Compose([
    transforms.Resize((224, 224)),
    transforms.ToTensor(),
    transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
])
# Load the TorchScript model exported ahead of time (see the export sketch below)
model1 = torch.jit.load('scripted_vehicle_model.pt', map_location=torch.device('cpu'))
model1.eval()

classes = ['Bus', 'bicycle', 'car']
def predict(inp):
    # inp is a PIL image from Gradio; preprocess it and add a batch dimension
    inp = transformer(inp).unsqueeze(0)
    with torch.no_grad():
        prediction = F.softmax(model1(inp)[0], dim=0)
    confidences = {classes[i]: float(prediction[i]) for i in range(len(classes))}
    return confidences
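# Optional local sanity check before wiring up the Gradio UI. The filename
# 'sample.jpg' is a placeholder assumption; substitute any local image.
# img = Image.open('sample.jpg').convert('RGB')
# print(predict(img))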
# gr.inputs.Image was removed in recent Gradio releases; use gr.Image with
# type="pil" so the callback receives a PIL image. debug must be a boolean,
# not the string 'True'.
gr.Interface(fn=predict,
             inputs=gr.Image(type="pil", label="Input Image"),
             outputs=gr.Label(num_top_classes=3)).launch(debug=True)
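The app expects a TorchScript file named scripted_vehicle_model.pt next to app.py. The original export step is not shown in this Space; the sketch below is one plausible way such a file could have been produced, assuming a ResNet-18 fine-tuned on the three classes above and a checkpoint file 'vehicle_resnet18.pth' (both the checkpoint name and the fine-tuning setup are assumptions, not taken from this code).

import torch
from torchvision import models

# Rebuild the fine-tuned architecture: ResNet-18 with a 3-way head (Bus/bicycle/car)
model = models.resnet18(weights=None)
model.fc = torch.nn.Linear(model.fc.in_features, 3)

# Load the fine-tuned weights (hypothetical checkpoint name) and switch to eval mode
model.load_state_dict(torch.load('vehicle_resnet18.pth', map_location='cpu'))
model.eval()

# Script and save the model so the Space can load it without the training code
scripted = torch.jit.script(model)
scripted.save('scripted_vehicle_model.pt')

Either torch.jit.script or torch.jit.trace would work for a plain ResNet; script preserves any control flow, while trace would need an example input tensor.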