import os, sys, time
import numpy as np
import yaml, cv2
import threading, traceback
from threading import Lock, RLock

parent_path = os.path.abspath(os.path.join(__file__, *([".."] * 3)))
sys.path.insert(0, parent_path)

from queue import Empty, Queue
from core.utils.logger import logging
from core.task.modules.processor_sequence import ProcessorSequence
from core.algorithm.predict_pipeline import PredictPipeline
from core.task.modules.task_lines import *
from core.utils.general import dynamic_import
from core.utils.error_exception import ErrorException


class TaskPipeline(object):
    """Task pipeline and resource scheduler.

    Pipeline-stage modules are plugged in via configuration, and predictor
    (GPU) resources are managed through a fixed-size pool: each running task
    borrows one predictor and returns it when the task is popped.
    """

    def __init__(self, cfg) -> None:
        """Build the pipeline from a config dict or a YAML file path.

        Args:
            cfg: a configuration dict, or a path to a YAML file.  The config
                must contain ``SubTaskPipeline`` (per-tag task definitions)
                and ``PredictPipeline`` (``pool_size`` and predictor ``cfg``).

        Raises:
            TypeError: if ``cfg`` is neither a dict nor a str path.
        """
        self.status = 0  # 0 = constructing, 2 = running (collector loop active)
        if isinstance(cfg, dict):
            self.cfg = cfg
            self._cfg = cfg.copy()  # pristine copy of the caller's dict
        elif isinstance(cfg, str):
            with open(cfg, "r") as f:
                self.cfg = yaml.safe_load(f)
            self._cfg = self.cfg.copy()  # keep both branches consistent
        else:
            # Fail fast instead of producing a half-initialised instance.
            raise TypeError("cfg must be a dict or a path to a YAML file")
        self.task_configs = self.cfg["SubTaskPipeline"]
        self.tasks = {}  # running-task table: task id -> task instance
        self.running_lock = RLock()  # guards self.tasks
        self.collect_pool = Queue()  # task ids awaiting auto collection
        # Build the predictor resource pool.
        predict_pipe_cfg = self.cfg["PredictPipeline"]
        self.predict_pipeline_pool = Queue()
        for _ in range(predict_pipe_cfg["pool_size"]):
            self.predict_pipeline_pool.put(PredictPipeline(**predict_pipe_cfg["cfg"]))
        self.status = 2  # enables the collector loop below
        # Daemon thread: self.status is never set back below 2, so a
        # non-daemon collector would keep the process alive forever.
        collecton_loop_th = threading.Thread(target=self._loop, daemon=True)
        collecton_loop_th.start()

    def _loop(self):
        """Resource-collection loop (runs on a background thread).

        Polls ``collect_pool`` for task ids: a closed task is popped (its
        predictor returns to the pool); a still-running task is re-queued
        and checked again on a later pass.
        """
        while self.status == 2:
            try:
                idx = self.collect_pool.get_nowait()
            except Empty:  # nothing queued for collection yet
                time.sleep(2)
                continue

            # ``with`` guarantees the lock is released even if is_close()
            # or _task_pop() raises (the manual acquire/release did not).
            with self.running_lock:
                if idx in self.tasks:
                    if self.tasks[idx].is_close():
                        self._task_pop(idx)
                        logging.info("task {} done by auto collection".format(idx))
                    else:
                        self.collect_pool.put(idx)
            time.sleep(1)

    def _task_initialization(self, tag)->BaseTask:
        """Instantiate the sub-task configured under ``tag``.

        Borrows a predictor from the pool; if construction fails for any
        reason the predictor is returned before the exception propagates,
        so the pool never shrinks.

        Raises:
            queue.Empty: if no predictor is currently available.
        """
        predictor = self.predict_pipeline_pool.get_nowait()
        try:
            task_cfg = self.task_configs[tag].copy()
            task_type = task_cfg.pop("type")
            cfgs = task_cfg.pop("cfg")
            # Remaining keys are processor-sequence definitions (or None).
            processes = {
                key: None if processors is None else ProcessorSequence(processors)
                for key, processors in task_cfg.items()
            }

            # SECURITY: task_type is eval'ed to resolve a task class imported
            # via task_lines.  This is safe only because the value comes from
            # the trusted YAML config — never feed it untrusted input.
            task_instance = eval(task_type)(
                predictor = predictor,
                **processes,
                **cfgs
            )
        except Exception:
            self.predict_pipeline_pool.put(predictor)
            raise  # bare raise preserves the original traceback
        return task_instance

    def task_add(self, data: dict):
        """Add and run a task.

        ``data`` must contain ``id`` (unique task id), ``tag`` (key into the
        ``SubTaskPipeline`` config) and ``data`` (task parameters).  If the
        id is already running, nothing is added and the failure is logged.

        Returns:
            The (possibly processed) ``data`` payload.

        Raises:
            ErrorException: re-raised after the task is popped.
        """
        # Resolve the id up front so the ErrorException handler below can
        # always reference it (previously a missing "id" key left the name
        # unbound and the handler raised NameError).
        task_id = data.get("id")
        try:
            tag = data["tag"]
            if task_id in self.tasks:
                raise Exception("task id {} is running".format(task_id))

            # Instantiate the task (borrows a predictor from the pool).
            task = self._task_initialization(tag=tag)
            with self.running_lock:
                self.tasks[task_id] = task
                # Run the task.
                try:
                    data = data["data"]
                    data["id"] = task_id
                    task.set(data)  # initialise task parameters
                    data = task(data)
                finally:
                    if isinstance(task, LineTask):  # linear tasks skip auto collection
                        self._task_pop(task_id)
                    else:
                        self.collect_pool.put(task_id)  # collected by _loop later
        except ErrorException:
            self._task_pop(task_id)
            raise
        except Exception:
            # Best-effort API: failures are logged, not propagated.
            logging.error("|add task fail|")
            logging.error(traceback.format_exc())
        return data

    def task_set(self, data: dict)->None:
        """Reconfigure a running task.

        task_pipeline.yml holds the initial task configuration; call this to
        change parameters at runtime (e.g. a new stream-pull address).  The
        fields are matched automatically from the ``data`` dict.
        """
        try:
            self.tasks[data["id"]].set(data)
        except Exception:
            logging.error(traceback.format_exc())

    def _task_pop(self, id)->None:
        """Remove task ``id``: close it and return its predictor to the pool.

        A no-op (logged) when the id is not in the running-task table.
        """
        with self.running_lock:
            if not id in self.tasks:
                logging.info("|task {} not in running_task, don't need pop.|".format(id))
            else:
                logging.info("|task {} done.|".format(id))
                task = self.tasks.pop(id)
                task.close()
                self.predict_pipeline_pool.put(task.predictor)

    def task_pop(self, data)->None:
        """Close a task.

        data={
            "id":
        }
        """
        try:
            id = data["id"]
            self._task_pop(id)
        except Exception:
            logging.error(traceback.format_exc())
        return data


if __name__ == "__main__":

    def loop_task_test():
        """Smoke test: read a video file through the pipeline and write it back."""
        cfg_file = "core/task/task_samples/video_reader_writer_pipeline.yml"
        video_file_path = "data/input/car_test_video.mp4"
        task_pipeline = TaskPipeline(cfg_file)
        data = {
            "tag": "Video_FTP", 
            "id": "1", 
            "data": 
                {
                    "SteamReader_path":video_file_path,
                    "VideoWriter_save_path": "/home/smartgis/workspace/project/smart_server/data/output/car_test_video.mp4",
                }
            }

        task_pipeline.task_add(data=data)
        # Keep the main thread alive while the task runs in the background.
        while True:
            time.sleep(1)

    def rtmp_test():
        """Smoke test for the RTMP streaming module."""
        cfg_file = "projects/wind_power/configs/task_pipeline_cfg.yml"
        task_pipeline = TaskPipeline(cfg_file)
        data = {"tag": "RTMP", "id": "1", "data": {}}

        task_pipeline.task_add(data=data)
        # Keep the main thread alive (the original incremented an undefined
        # ``idx`` here, raising NameError on the first iteration).
        while True:
            time.sleep(1)

    def pred_test():
        """Smoke test: run single-image detection through the pipeline."""
        task_cfg = "projects/wind_power/configs/algorithm_pipeline_cfg.yml"
        file_path = "data/input/85.jpg"
        img_np = cv2.imread(file_path)
        task_pipeline = TaskPipeline(task_cfg)
        data = {
            "tag":"DET",
            "id": 1,
            "data":{
                "id": 1,
                # "Base64Decode_mode": "RGB",
                "input_data":img_np
            }
        }
        # task_add returns the processed payload; TaskPipeline itself is not
        # callable (the original ``task_pipeline(data["data"])`` raised TypeError).
        data = task_pipeline.task_add(data)
        print(data)

    rtmp_test()
