from functools import lru_cache
from typing import List, Tuple

from huggingface_hub import hf_hub_download
from imgutils.data import ImageTyping, load_image, rgb_encode

from onnx_ import _open_onnx_model
from plot import detection_visualize
from yolo_ import _image_preprocess, _data_postprocess

_HAND_MODELS = [
    'hand_detect_v0.7_s',
    'hand_detect_v0.6_s',
    'hand_detect_v0.5_s',
    'hand_detect_v0.4_s',
    'hand_detect_v0.3_s',
    'hand_detect_v0.2_s',
    'hand_detect_v0.1_s',
    'hand_detect_v0.1_n',
]
_DEFAULT_HAND_MODEL = _HAND_MODELS[0]


@lru_cache()
def _open_hand_detect_model(model_name):
    # Download the requested ONNX model from the Hugging Face Hub and open it;
    # lru_cache keeps the loaded session around so repeated calls reuse it.
    return _open_onnx_model(hf_hub_download(
        'deepghs/anime_hand_detection',
        f'{model_name}/model.onnx'
    ))


_LABELS = ['hand']


def detect_hands(image: ImageTyping, model_name: str, max_infer_size=640,
                 conf_threshold: float = 0.35, iou_threshold: float = 0.7) \
        -> List[Tuple[Tuple[int, int, int, int], str, float]]:
    # Load the image as RGB and resize it to fit within the inference size.
    image = load_image(image, mode='RGB')
    new_image, old_size, new_size = _image_preprocess(image, max_infer_size)
    # Encode the image as a float array and add a batch dimension.
    data = rgb_encode(new_image)[None, ...]
    # Run the ONNX detector, then threshold/deduplicate the predictions and
    # map the boxes back to the original image size.
    output, = _open_hand_detect_model(model_name).run(['output0'], {'images': data})
    return _data_postprocess(output[0], conf_threshold, iou_threshold, old_size, new_size, _LABELS)


def _gr_detect_hands(image: ImageTyping, model_name: str, max_infer_size=640,
                     conf_threshold: float = 0.35, iou_threshold: float = 0.7):
    # Gradio-facing wrapper: run detection and draw the resulting boxes on the image.
    ret = detect_hands(image, model_name, max_infer_size, conf_threshold, iou_threshold)
    return detection_visualize(image, ret, _LABELS)
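

# Hypothetical usage sketch, not part of the original module: the image path
# below is an assumption for illustration. detect_hands returns a list of
# ((x0, y0, x1, y1), label, confidence) tuples, per its return annotation.
if __name__ == '__main__':
    detections = detect_hands('example.jpg', _DEFAULT_HAND_MODEL)
    for (x0, y0, x1, y1), label, confidence in detections:
        print(f'{label}: {confidence:.3f} at ({x0}, {y0}, {x1}, {y1})')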