# Marsupial Demo
#
# Gradio app that detects and identifies 72 species of Australian wildlife
# in camera-trap images using the Marsupial.ai YOLOv5 model.

import gradio as gr
import torch
import torchvision
import numpy as np
from PIL import Image

# Load the Marsupial model once at startup so every request reuses it.
# TODO: Allow user selectable model?
model = torch.hub.load(
    'ultralytics/yolov5:v6.2',
    'custom',
    "model_weights/marsupial_72s_lures.pt",
    trust_repo=True,
)


def yolo(im, size=640):
    """Run the Marsupial detector on an image and return the annotated result.

    Parameters
    ----------
    im : PIL.Image.Image
        Input photograph (any size).
    size : int, optional
        Target length for the longest edge before inference (default 640).

    Returns
    -------
    PIL.Image.Image
        The resized image with detection boxes and labels rendered onto it.
    """
    g = size / max(im.size)  # gain: scale so the longest edge becomes `size`
    # PIL's Image.resize requires a length-2 sequence; passing a bare
    # generator raises "TypeError: object of type 'generator' has no len()",
    # so materialize the scaled dimensions as a tuple.
    im = im.resize(tuple(int(x * g) for x in im.size))
    results = model(im)  # inference
    results.render()  # draws boxes and labels into results.imgs in place
    return Image.fromarray(results.imgs[0])


inputs = gr.inputs.Image(type="pil", label="Input Image")
outputs = gr.outputs.Image(type="pil", label="Output Image")

title = "Marsupial.ai"
description = "Detect and identify 72 different species of Australian wildlife using Marsupial's most detailed model"
# NOTE: a plain "..." string cannot span physical lines; use a triple-quoted
# literal so the multi-line article text is valid Python.
article = """

The marsupial app makes predictions using a YOLOv5s model that was trained to detect and identify 72 different species of animals found in Australia in camera trap images; find out more about the project on GitHub. This app was built by Dr Henry Lydecker and Dr Gordon McDonald at the Sydney Informatics Hub, a Core Research Facility at the University of Sydney. Find out more about the YOLO model from the original creator, Joseph Redmon. YOLOv5 is a family of compound-scaled object detection models trained on the COCO dataset and developed by Ultralytics, and includes simple functionality for Test Time Augmentation (TTA), model ensembling, hyperparameter evolution, and export to ONNX, CoreML and TFLite. Source code | PyTorch Hub

"""

examples = [
    ['data/eastern_grey_kangaroo.jpg'],
    ['data/red_fox.jpg'],
    ['data/brushtail_possum.jpg'],
    ['data/koala2.jpg'],
    ['data/cat1.jpg'],
    ['data/australian_magpie.jpg'],
]

gr.Interface(
    yolo,
    inputs,
    outputs,
    title=title,
    description=description,
    article=article,
    examples=examples,
    theme="huggingface",
).launch(enable_queue=True)