#!/usr/bin/env python
# Gradio demo for the ODOR object detector: a DINO model with a FocalNet
# backbone that detects smell-related objects in images.
import numpy as np
import gradio as gr

from detectron2.config import LazyConfig, instantiate
from detectron2.checkpoint import DetectionCheckpointer
from demo.demo import VisualizationDemo

# LazyConfig model config and trained checkpoint weights.
config_file = 'demo-confs/odor_demo.py'
ckpt_pth = 'focaldino_ep18.pth'

try:
    cfg = LazyConfig.load(config_file)
except AssertionError as e:
    # The config may assert on datasets that are not registered in this
    # environment; ignore the "Dataset ..." assertion and re-raise anything else.
    if not str(e).startswith('Dataset '):
        raise

print("Loading the model...")
# Build the model from the config, load the checkpoint, and run it on CPU.
model = instantiate(cfg.model)
model.to("cpu")
checkpointer = DetectionCheckpointer(model)
checkpointer.load(ckpt_pth)
model.eval()
print("Model loaded. Let's predict.")


def treat_grayscale(img):
    # Replicate a single-channel (grayscale) image to three channels so the
    # detector always receives an HxWx3 array.
    if len(img.shape) == 2:
        return np.stack((img,) * 3, axis=-1)
    return img

def show_preds(input_image):
    # Wrap the loaded model in a visualizer; the resize settings match the
    # common detectron2 test-time defaults (shorter side 800, longer side 1333).
    demo = VisualizationDemo(
        model=model,
        min_size_test=800,
        max_size_test=1333,
        img_format='RGB',
        metadata_dataset='odor_test'
    )
    img = treat_grayscale(input_image)
    # Run inference with a 0.25 confidence threshold.
    predictions, visualized_output = demo.run_on_image(img, 0.25)
    # Reverse the channel order (RGB <-> BGR) of the visualized image before
    # handing it back to Gradio.
    visualized_rgb_image = visualized_output.get_image()[:, :, ::-1]
    return visualized_rgb_image
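
# Optional local sanity check (kept commented out): run the predictor once
# without the Gradio UI. The image path below is a placeholder; point it at
# any test image you have available.
#
#   from detectron2.data.detection_utils import read_image
#   sample = read_image('examples/sample.jpg', format='RGB')
#   print(show_preds(sample).shape)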


# Wire the prediction function into a simple Gradio interface.
gr_interface = gr.Interface(
    fn=show_preds,
    inputs=["image"],
    outputs=[gr.Image(type="pil")],
    title="ODOR Object Detector",
    description="A DINO model with a FocalNet backbone that detects objects related to smells. Upload an image or click an example image below to use.",
)
gr_interface.launch()
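# Note: launch() serves the app locally by default; Gradio's launch() also
# accepts share=True for a temporary public link or server_name="0.0.0.0" to
# listen on all network interfaces.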