import glob
import os
import time
import threading
from ced_inference import CEDInference

# -------- Inference configuration --------
model_weights = "weights/best_0809.pt"
# Device -> executor count (presumably workers per GPU; confirm with CEDInference docs)
gpu_config = {"cuda:0": 2, "cuda:1": 2}
save_dir = "result_img"     # directory where result images are written
conf_thres = 0.5            # confidence threshold for detections
batch_size = 3

# Start the inference instance
inference_instance = CEDInference(
    weight_file=model_weights,
    gpu_executors=gpu_config,
    result_save_path=save_dir,
    threshold=conf_thres,
    batch_size=batch_size
)

# -------- Run inference --------
inference_instance.start_inference_executors()

images_folder = "img"
exts = {".jpg", ".png", ".jpeg", ".bmp"}
# Collect image files once; the extension filter is applied here and only here.
all_imgs = [
    f for f in glob.glob(os.path.join(images_folder, "*"))
    if os.path.splitext(f)[1].lower() in exts
]

# Feed every collected image to the inference queue.
# (The previous version re-checked the extension inside this loop, but
# all_imgs is already filtered above — the second check was redundant.)
for img_file in all_imgs:
    inference_instance.read_input_image(img_file)

# Wait for all inference tasks to complete and shut down the executors.
inference_instance.stop_all_executors()
print("All inference tasks have been completed.")
