#!/usr/bin/env python3
"""
Gradio app for YOLOv3 object detection with Grad-CAM visualization.
"""
import numpy as np
import gradio as gr
import torch
from yolo_model import YOLOv3

import config
from inference import predict, YoloCAM


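# Load the Lightning checkpoint for CPU-only inference; strict=False tolerates
# state-dict keys that don't match the current model definition.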
model_path = 'epoch=39-step=41400.ckpt'
model = YOLOv3.load_from_checkpoint(
    model_path,
    map_location=torch.device('cpu'),
    strict=False,
    num_classes=config.NUM_CLASSES,
)
model.eval()

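# Hook Grad-CAM onto the second-to-last layer; use_cuda=False keeps everything on CPU.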
cam = YoloCAM(model=model, target_layers=[model.layers[-2]], use_cuda=False)


def inference(
    org_img: np.ndarray,
    iou_thresh: float,
    thresh: float,
    show_cam: bool,
    transparency: float,
):
    """Run YOLOv3 detection on a single image, optionally overlaying Grad-CAM."""
    outputs = predict(cam, model, org_img, iou_thresh, thresh, show_cam, transparency)
    return outputs


if __name__ == '__main__':
    title = "YOLOv3 from Scratch on Pascal VOC with Grad-CAM"
    description = (
        "PyTorch implementation of YOLOv3 trained from scratch on the Pascal VOC dataset, "
        f"with Grad-CAM visualization.\nClasses in Pascal VOC: {', '.join(config.PASCAL_CLASSES)}"
    )
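    # Each example row maps positionally onto the inputs below:
    # [image path, IoU threshold, confidence threshold, show Grad-CAM, overlay opacity].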
    examples = [
        ["examples/dog.jpg", 0.5, 0.4, True, 0.5],
        ["examples/cat.jpg", 0.6, 0.5, True, 0.5],
        ["examples/car.jpg", 0.6, 0.5, True, 0.5],
        ["examples/plane.jpg", 0.6, 0.5, True, 0.5],
        ["examples/bird.jpg", 0.6, 0.5, True, 0.5],
    ]

    demo = gr.Interface(
        inference,
        inputs=[
            gr.Image(label="Input Image"),
            gr.Slider(0, 1, value=0.5, label="IoU Threshold"),
            gr.Slider(0, 1, value=0.4, label="Confidence Threshold"),
            gr.Checkbox(label="Show Grad-CAM"),
            gr.Slider(0, 1, value=0.5, label="Grad-CAM Opacity"),
        ],
        outputs=[
            gr.Gallery(rows=2, columns=1),
        ],
        title=title,
        description=description,
        examples=examples,
    )
    gr.close_all()  # close any Gradio apps still running from a previous session
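    # launch() serves on localhost by default; share=True would create a temporary public link.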
    demo.launch()