import gradio as gr
from PIL import Image, ImageDraw
from transformers import pipeline
import numpy as np


def plot_results(image, results, threshold=0.7):
    """Draw detected boxes, labels and scores on a copy of the input image."""
    # Work on a copy so the caller's image is left untouched; the round-trip
    # through numpy also accepts plain array inputs.
    image = Image.fromarray(np.uint8(image))
    draw = ImageDraw.Draw(image)
    for result in results:
        score = result["score"]
        label = result["label"]
        # "box" is a dict with keys xmin, ymin, xmax, ymax (in that order)
        box = list(result["box"].values())
        if score > threshold:
            x, y, x2, y2 = tuple(box)
            draw.rectangle((x, y, x2, y2), outline="red", width=1)
            draw.text((x, y), label, fill="white")
            # Draw the confidence just below the label so the two texts
            # do not overlap, rounded for readability.
            draw.text(
                (x, y + 10),
                text=f"{score:.2f}",
                fill="green",
            )
    return image
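
# For reference (illustrative values only), each entry returned by the
# object-detection pipeline is a dict shaped like:
#   {"score": 0.97, "label": "shirt, blouse",
#    "box": {"xmin": 12, "ymin": 34, "xmax": 256, "ymax": 300}}
# plot_results above relies on the "box" keys being ordered xmin, ymin, xmax, ymax.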

# Build the object detection pipeline once at startup so the model is
# downloaded and loaded a single time, not on every request.
obj_detector = pipeline(
    "object-detection", model="Antoine101/detr-resnet-50-dc5-fashionpedia-finetuned"
)


def predict(image):
    results = obj_detector(image)
    return plot_results(image, results)

title = "Are you fashion?"
description = """
DETR model finetuned on "detection-datasets/fashionpedia" for apparel detection.
"""

demo = gr.Interface(
    fn=predict, 
    inputs=gr.Image(label="Input Image", type="pil"), 
    outputs="image",
    examples=[["example1.jpg"]],
    title=title,
    description=description
)
demo.launch()
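
# Optional local smoke test (skips the web UI), assuming example1.jpg sits next
# to this file as referenced in the examples above; output filename is arbitrary:
#     from PIL import Image
#     predict(Image.open("example1.jpg")).save("annotated.jpg")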