from typing import List

import cv2
import numpy as np
import torch
from pytorch_grad_cam.utils.image import show_cam_on_image

import src.config as config
from src.model_yolov3 import YOLOv3
from src.utils import cells_to_bboxes, non_max_suppression, draw_predictions, YoloCAM

# Load the trained YOLOv3 checkpoint (20 PASCAL VOC classes) on CPU for inference.
model = YOLOv3(num_classes=20)
weights_path = "/home/user/app/Final_trained_model.pth"
ckpt = torch.load(weights_path, map_location="cpu")
model.load_state_dict(ckpt)
model.eval()
print("[x] Model Loaded..")

# Scale the relative anchor sizes by each prediction grid size in config.S.
scaled_anchors = (
    torch.tensor(config.ANCHORS)
    * torch.tensor(config.S).unsqueeze(1).unsqueeze(1).repeat(1, 3, 2)
).to(config.DEVICE)

# Grad-CAM wrapper targeting the penultimate layer of the detector.
cam = YoloCAM(model=model, target_layers=[model.layers[-2]], use_cuda=False)


def predict(
    image: np.ndarray,
    iou_thresh: float = 0.5,
    thresh: float = 0.4,
    show_cam: bool = False,
    transparency: float = 0.5,
) -> List[np.ndarray]:
    with torch.no_grad():
        transformed_image = config.transforms(image=image)["image"].unsqueeze(0)
        output = model(transformed_image)

        # Decode raw predictions from each of the three YOLO scales into boxes.
        bboxes = [[] for _ in range(1)]
        for i in range(3):
            batch_size, A, S, _, _ = output[i].shape
            anchor = scaled_anchors[i]
            boxes_scale_i = cells_to_bboxes(output[i], anchor, S=S, is_preds=True)
            for idx, box in enumerate(boxes_scale_i):
                bboxes[idx] += box

        # Filter overlapping detections, then draw the survivors on the input image.
        nms_boxes = non_max_suppression(
            bboxes[0], iou_threshold=iou_thresh, threshold=thresh, box_format="midpoint",
        )
        plot_img = draw_predictions(image, nms_boxes, class_labels=config.PASCAL_CLASSES)
        if not show_cam:
            return [plot_img]

    # Grad-CAM needs gradients, so the overlay is computed outside torch.no_grad().
    grayscale_cam = cam(transformed_image, scaled_anchors)[0, :, :]
    img = cv2.resize(image, (416, 416))
    img = np.float32(img) / 255
    cam_image = show_cam_on_image(img, grayscale_cam, use_rgb=True, image_weight=transparency)
    return [plot_img, cam_image]
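

# --- Usage sketch (assumption; not part of the original file) ---
# A minimal local smoke test for predict(). The image path below is hypothetical,
# and the Space itself presumably exposes predict() through a web UI instead.
if __name__ == "__main__":
    sample = cv2.imread("sample.jpg")                 # hypothetical test image
    sample = cv2.cvtColor(sample, cv2.COLOR_BGR2RGB)  # OpenCV loads BGR; the model expects RGB
    results = predict(sample, iou_thresh=0.5, thresh=0.4, show_cam=True)
    for idx, out in enumerate(results):
        print(f"output[{idx}]: shape={out.shape}, dtype={out.dtype}")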