import signal
import sys


# 丢弃率实验结果（每5s统计）：
# 9进程：丢弃率 10~20%【大多数在10~14%】；FPS 20~21
# 6进程：丢弃率 8~20%【大多数在10~14】；FPS 20~21
# 3进程：丢弃率 3~12%【大多数在3~7%】；FPS 24
# 2进程：丢弃率 0~3%；FPS 23~24
# 结论1：开始时丢弃率较高，后面稳定后丢弃率会下降
# 结论2：多进程导致丢弃率上升，会使计算结果白白浪费，丢失的较多还会使帧率下降
# 总结：有必要保留所有计算结果
# 过滤一个非有序序列，将其过滤成递增有序序列，其余元素抛弃
class Filter:
    """Pass only the strictly increasing ids out of an unordered stream.

    The very first id seen is always accepted; after that an id is
    accepted only if it is greater than the last accepted one, and
    dropped otherwise.
    """

    def __init__(self):
        # Most recently accepted id; None until the first call.
        self._last_id = None

    def __call__(self, id):
        """Return True if *id* extends the increasing sequence, else False."""
        is_first = self._last_id is None
        if is_first or id > self._last_id:
            self._last_id = id
            return True
        return False

# 将一个乱序序列规整成有序序列
class OrderQueue:
    """Regularize an out-of-order id-tagged stream into sequential order.

    Items arrive on *data_queue* as tuples whose first element is an id.
    Items that arrive ahead of their turn are parked in a cache until the
    expected id comes around.

    NOTE(review): if an id is permanently lost upstream, get() blocks
    forever and the cache keeps growing — confirm producers never drop ids.
    """

    ID_LIMIT = 65535  # wanted id wraps back to 0 at this value

    def __init__(self, data_queue):
        self._data_queue = data_queue  # source of (id, ...) tuples
        self._data_cache = {}          # id -> item, early out-of-order arrivals
        self._wanted_id = 0            # next id to hand out

    def get(self):
        """Block until the item carrying the next sequential id is available."""
        cache = self._data_cache
        while self._wanted_id not in cache:
            item = self._data_queue.get()
            if item[0] == self._wanted_id:
                # Arrived exactly in order — hand it out without caching.
                self._step()
                return item
            cache[item[0]] = item
        item = cache.pop(self._wanted_id)
        self._step()
        return item

    def _step(self):
        # Advance the expected id, wrapping so it stays below ID_LIMIT.
        self._wanted_id = (self._wanted_id + 1) % OrderQueue.ID_LIMIT

def linear(min, max, percent):
    """Linearly interpolate between *min* and *max* by *percent* (0.0-1.0)."""
    span = max - min
    return min + span * percent

def register_exit(callback=None):
    """Install SIGTERM/SIGINT handlers that run *callback*, then exit.

    callback: optional zero-argument callable invoked before exiting;
              when falsy, the process just exits.

    Fixes: use sys.exit() instead of the bare exit() helper — exit() is
    injected by the `site` module and is absent under `python -S` or in
    frozen/embedded interpreters.  Also removed the unused
    `from logger import logger` import.
    NOTE(review): confirm the logger module has no required import-time
    side effects before relying on its removal.
    """
    def wrapper(*_):
        if callback:
            callback()
        sys.exit()
    signal.signal(signal.SIGTERM, wrapper)
    signal.signal(signal.SIGINT, wrapper)
