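"""Gradio demo for student-behaviour detection with an ultralyticsplus YOLO model.

Loads a custom-trained YOLO checkpoint, runs it on an uploaded image with
user-adjustable image size, confidence, and IoU thresholds, and returns the
image with the detected boxes drawn on it.
"""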
import gradio as gr
from ultralyticsplus import YOLO, render_result


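# Inference callback: Gradio calls this with the current values of the
# input components, in the order they appear in the `inputs` list below.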
def launch(
    image: str = None,  # filepath supplied by gr.Image(type="filepath")
    image_size: int = 640,
    conf_threshold: float = 0.4,
    iou_threshold: float = 0.5,
):
    try:
        # Custom-trained YOLO weights for student-behaviour detection
        model = YOLO(
            "./student-behaviour-test-deploy/models/OWN-DATASET-640-e120-b32-best.pt"
        )

        # Run detection on the uploaded image with the user-selected settings
        results = model.predict(
            image, conf=conf_threshold, iou=iou_threshold, imgsz=image_size
        )

        # Draw the detected boxes onto the input image and return the rendering
        render = render_result(model=model, image=image, result=results[0])
        return render
    except Exception as e:
        print("error:", e)
        # Fall back to a bundled placeholder image if loading or inference fails
        return "./download.jpeg"


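# Input components; their order matches the parameters of launch()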
inputs = [
    gr.Image(type="filepath", label="Input Image"),
    gr.Slider(minimum=256, maximum=1280, value=640, step=32, label="Image Size"),
    gr.Slider(
        minimum=0.0, maximum=1.0, value=0.4, step=0.1, label="Confidence Threshold"
    ),
    gr.Slider(minimum=0.0, maximum=1.0, value=0.4, step=0.1, label="IOU Threshold"),
]

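# Output component; launch() returns a rendered PIL image on success,
# or the path of a placeholder image on error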
outputs = gr.Image(type="filepath", label="Output Result")

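# Build the Gradio interface and start the server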
iface = gr.Interface(fn=launch, inputs=inputs, outputs=outputs)
iface.launch()