import json
import logging
import threading
import time

from tools.celery_worker import app
from tools.constant.redis_keys import ALGO_CHANNEL
from tools.ext.split_basedon_taskname import parse_alg_res_basedon_taskname


class TaskScheduler:
    """Tracks in-flight Celery task ids per algorithm and publishes finished results.

    ``map_task`` maps ``algo_name -> [(task_id, submit_ts), ...]``.  Every
    read/write of that dict is serialized through ``map_task_lock`` so one
    scheduler instance can be shared between threads.
    """

    def __init__(self):
        # Guards all access to map_task below.
        self.map_task_lock = threading.Lock()
        # algo_name -> list of (task_id, ts) tuples awaiting a result.
        self.map_task = {}

    def add_task(self, algo_name, task_id, ts=None):
        """Register ``task_id`` under ``algo_name``.

        ``ts`` defaults to the current time *at call time*.  (The original
        signature used ``ts=time.time()``, which is evaluated once at import,
        so every task silently shared the module-load timestamp and the
        ``time_out`` check in the handlers was meaningless.)
        """
        if ts is None:
            ts = time.time()
        with self.map_task_lock:
            self.map_task.setdefault(algo_name, []).append((task_id, ts))

    def get_tasks(self, algo_name):
        """Return the pending ``(task_id, ts)`` list for ``algo_name`` (``[]`` if none).

        NOTE(review): on a hit this returns the internal list object itself
        (original behavior preserved); callers must not mutate it without
        holding ``map_task_lock``.
        """
        with self.map_task_lock:
            return self.map_task.get(algo_name, [])

    def task_clean(self, algo_name):
        """Drop every pending task recorded for ``algo_name``."""
        with self.map_task_lock:
            self.map_task[algo_name] = []

    def task_size(self, algo_name):
        """Return how many tasks are currently pending for ``algo_name``."""
        with self.map_task_lock:
            return len(self.map_task.get(algo_name, ()))

    def handler_task(self, algo_name, client, time_out=720):
        """Drain pending tasks for ``algo_name`` and publish finished results.

        For each recorded task id the Celery result backend is queried through
        ``client`` (a redis-like object with ``get``/``publish``).  A SUCCESS
        result whose payload carries ``code == 200`` is published on
        ``ALGO_CHANNEL``; tasks whose result is not ready yet (no meta key, or
        status PENDING) are re-queued; tasks older than ``time_out`` seconds
        are dropped with a debug log.
        """
        still_pending = []
        with self.map_task_lock:
            task_list = self.map_task.get(algo_name) or []
            while task_list:
                task_id, ts = task_list.pop()
                if time.time() - ts >= time_out:
                    # Too old -- drop it.
                    logging.debug("算法--%s,任务%s超时" % (algo_name, task_id))
                    continue
                # Fetch the result meta once instead of twice.
                raw = client.get(f"celery-task-meta-{task_id}")
                if not raw:
                    # Backend has not written the result yet; retry next round.
                    still_pending.append((task_id, ts))
                    continue
                algo_res = json.loads(raw)
                if algo_res['status'] == 'SUCCESS' and algo_res['result'] is not None:
                    algo_data = json.loads(algo_res['result'])
                    # .get() avoids a KeyError when the payload has no "code".
                    if algo_data.get("code") == 200:
                        logging.debug("算法--%s,结果正常" % (algo_res))
                        client.publish(ALGO_CHANNEL,
                                       json.dumps({"algo": algo_name, "data": algo_data}))
                elif algo_res['status'] in ("PENDING",):
                    # The original compared against the misspelled "PEDING",
                    # so pending tasks with an existing meta record were
                    # silently discarded instead of retried.
                    still_pending.append((task_id, ts))
        # Re-register outside the with-block: add_task takes the same lock.
        for task_id, ts in still_pending:
            self.add_task(algo_name, task_id, ts=ts)

    def handler_task_same_model(self, algo_name_child, parent_name, algo_run_config,
                                client, time_out=720):
        """Drain the parent model's tasks and fan each result out to child algos.

        Each finished parent result is re-filtered once per child algorithm
        (ROI / IOU / object-type parameters taken from ``algo_run_config``)
        via ``parse_alg_res_basedon_taskname`` and published on
        ``ALGO_CHANNEL`` under the child's name.  Timeout/retry handling
        mirrors ``handler_task``.
        """
        still_pending = []
        with self.map_task_lock:
            task_list = self.map_task.get(parent_name) or []
            while task_list:
                task_id, ts = task_list.pop()
                if time.time() - ts >= time_out:
                    logging.debug("算法--%s,任务%s超时" % (parent_name, task_id))
                    continue
                raw = client.get(f"celery-task-meta-{task_id}")
                if not raw:
                    still_pending.append((task_id, ts))
                    continue
                algo_res = json.loads(raw)
                if algo_res['status'] == 'SUCCESS' and algo_res['result'] is not None:
                    algo_data = json.loads(algo_res['result'])
                    if algo_data.get("code") == 200:
                        logging.debug("算法--%s,结果正常" % (algo_res))
                        # TODO 预处理结果 (split the parent result per child algo)
                        for child_algo in algo_name_child:
                            # Convention: child algo name starts with "<object_type>_".
                            object_type_str = child_algo.split('_')[0]
                            device_id = algo_data["device_id"]
                            config = json.loads(
                                algo_run_config.get_algo_configs(
                                    parent_name, same_algo=algo_name_child)[device_id])
                            bus_args = config["bus_args"]
                            algo_args = config["algo_args"]
                            # The original wrote "value | default", which raises
                            # TypeError for lists and floats; ".get(...) or"
                            # gives the intended fallback when the configured
                            # value is missing or empty.
                            input_ROIs = bus_args.get("input_ROIs") or [
                                [[0, 0], [1919, 0], [1919, 1079], [0, 1079]],
                            ]
                            thresh_IOU = bus_args.get("thresh_IOU") or 0.0
                            object_type = algo_args.get("object_type") or [object_type_str]
                            thresh_confidence = 0.1
                            alg_split_res = parse_alg_res_basedon_taskname(
                                algo_res,
                                input_ROIs=input_ROIs,
                                thresh_IOU=thresh_IOU,
                                object_type=object_type,
                                thresh_confidence=thresh_confidence)
                            client.publish(ALGO_CHANNEL, json.dumps(
                                {"algo": child_algo, "data": alg_split_res}))
                        # NOTE(review): keying map_task by algo_name_child is
                        # kept from the original, but it raises TypeError when
                        # algo_name_child is a list (unhashable) -- confirm the
                        # intended type and whether this re-registration under
                        # the child key is really wanted.
                        self.map_task.setdefault(algo_name_child, []).append((task_id, ts))
                elif algo_res['status'] in ("PENDING",):
                    # Fixed from the misspelled "PEDING" so unfinished tasks
                    # are actually retried.
                    still_pending.append((task_id, ts))
        for task_id, ts in still_pending:
            self.add_task(parent_name, task_id, ts=ts)