import os

import numpy as np
import mxnet as mx  # used only to download the example image and the label file
import onnxruntime as ort
import gradio as gr

from PIL import Image
import imageio

def get_image(path):
    '''
    Load the image at `path` and return it as an RGB numpy array.
    '''
    img = imageio.imread(path, pilmode='RGB')  # pilmode='RGB' forces a 3-channel read
    return img

def preprocess(img):
    '''
    Preprocess a loaded RGB image for inference with the ONNX model:
    resize to 224x224, subtract the per-channel ImageNet means, swap the
    channel order, move channels first, and add a batch dimension.
    Returns a float32 array of shape (1, 3, 224, 224).
    '''
    img = np.array(Image.fromarray(img).resize((224, 224))).astype(np.float32)
    # Subtract the ImageNet channel means (values given in RGB order)
    img[:, :, 0] -= 123.68
    img[:, :, 1] -= 116.779
    img[:, :, 2] -= 103.939
    # Swap the R and B channels (RGB -> BGR)
    img[:, :, [0, 1, 2]] = img[:, :, [2, 1, 0]]
    # HWC -> CHW, then add a batch dimension
    img = img.transpose((2, 0, 1))
    img = np.expand_dims(img, axis=0)

    return img
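# Sanity check (illustrative): the preprocessed batch should be float32 with
# shape (1, 3, 224, 224), matching the model's expected input, e.g.
#   preprocess(get_image('kitten.jpg')).shape  ->  (1, 3, 224, 224)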

# Download an example image and the ImageNet class labels (synsets)
mx.test_utils.download('https://s3.amazonaws.com/model-server/inputs/kitten.jpg')
mx.test_utils.download('https://s3.amazonaws.com/onnx-model-zoo/synset.txt')
with open('synset.txt', 'r') as f:
    labels = [l.rstrip() for l in f]

# Fetch the pretrained GoogleNet model from the ONNX model zoo
os.system("wget https://github.com/onnx/models/raw/main/vision/classification/inception_and_googlenet/googlenet/model/googlenet-9.onnx")

ort_session = ort.InferenceSession("googlenet-9.onnx")
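# The model's single input is named "data_0"; if in doubt, the input names can
# be listed at runtime with ONNX Runtime's session API, e.g.:
#   print([inp.name for inp in ort_session.get_inputs()])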

def predict(path):
    img_batch = preprocess(get_image(path))

    # Run inference; the model expects its input under the name "data_0"
    outputs = ort_session.run(
        None,
        {"data_0": img_batch.astype(np.float32)},
    )

    # Sort class indices by descending score and keep the top 5
    a = np.argsort(-outputs[0].flatten())
    results = {}
    for i in a[0:5]:
        results[labels[i]] = float(outputs[0][0][i])
    return results
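# Example usage (illustrative): returns a dict mapping the 5 highest-scoring
# ImageNet labels to their raw scores, e.g.
#   predict('kitten.jpg')  ->  {'<synset label>': <score>, ...}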

title = "GoogleNet"
description = "GoogleNet is a convolutional neural network for image classification, which won the ImageNet Large Scale Visual Recognition Challenge in 2014."

# The example entry assumes a catonnx.jpg image is available next to this script
examples = [['catonnx.jpg']]
gr.Interface(predict,
             gr.inputs.Image(type='filepath'),
             "label",
             title=title,
             description=description,
             examples=examples).launch(enable_queue=True, debug=True)