import os
import json
import struct
import base64
import uuid
import cv2
import datetime
import time
import logging
from kafka import KafkaProducer
from multiprocessing import Queue
import signal

import sys
sys.path.append("../")
from config import *
from multiprocessing import Process
import logging.config
logging.config.fileConfig("logger.conf")
logger = logging.getLogger("pipeline")

# Kafka broker address(es), comma-separated, read from the environment.
bootstrap_servers = os.environ.get("KAFKA_URL")
# Lazily-created KafkaProducer singleton; initialized on first send_kafka() call.
producer = None

# Registry of supervised child processes, all keyed by an incrementing id `n`,
# so monitor_processes() can detect a dead process and restart it with the
# same target callable and arguments.
processes = {}
process_names = {}
target_dict = {}
args_dict = {}
n = 0


def float_arr_2_base64(float_arr):
    """
    Encode a sequence of floats as a base64 string.

    Values are packed as little-endian 32-bit floats before encoding.
    """
    packed = struct.pack('<%sf' % len(float_arr), *float_arr)
    return base64.b64encode(packed).decode("utf-8")


def get_id():
    """
    Return a random 32-character uppercase hex id (a UUID4 without dashes).
    """
    return uuid.uuid4().hex.upper()


def send_kafka(topic, message):
    """
    Send a JSON-serializable message to the given Kafka topic.

    Lazily creates the module-level KafkaProducer singleton on first use,
    so importing this module does not require a reachable broker.
    """
    global producer
    if producer is None:
        # bootstrap_servers comes from the KAFKA_URL env var (comma-separated).
        producer = KafkaProducer(value_serializer=lambda v: json.dumps(v).encode('utf-8'),
                      bootstrap_servers=bootstrap_servers.split(","))
    try:
        producer.send(topic, message)
    except Exception:
        # Bug fix: the original used a bare except and then logged "成功"
        # unconditionally; log the failure with traceback and stop here.
        logger.exception("[kafka]---->发送：失败")
    else:
        logger.info(f"[kafka]---->发送:{topic},成功")


def upload_pic(file_path, image):
    """
    Write an image array to disk at file_path.

    NOTE(review): the channel axis is reversed before writing — presumably
    converting RGB to the BGR order cv2.imwrite expects; confirm with callers.
    """
    bgr = image[:, :, ::-1]
    cv2.imwrite(file_path, bgr)


def current_time():
    """
    Return the current time as a 13-digit Unix timestamp in milliseconds.
    """
    # Bug fix: the original called datetime.now() twice, so the seconds and
    # the microseconds came from two different instants. time.time() already
    # yields epoch seconds with sub-millisecond resolution in one read.
    return int(time.time() * 1000)


def crop_bounding_boxes(image, x, y, w, h, margin=50):
    """
    Crop the box (x, y, w, h) out of an image with a safety margin.

    :param image: 2-D or 3-D array indexed as image[row, col, ...]
    :param x: left edge of the box (columns)
    :param y: top edge of the box (rows)
    :param w: box width
    :param h: box height
    :param margin: extra pixels kept on every side (default 50, as before)
    :return: the cropped sub-array (a view for numpy inputs)
    """
    # Bug fix: for boxes near the top/left edge, y - 50 / x - 50 went
    # negative, and negative slice starts silently select from the END of
    # the array (often an empty or wrong crop). Clamp the start to 0; the
    # end is already clamped by slice semantics.
    top = max(0, y - margin)
    left = max(0, x - margin)
    crop_image = image[top: y + h + margin, left: x + w + margin]
    return crop_image


def xywh2xyxy(x):
    """
    Convert a (center_x, center_y, width, height) box to
    (x_min, y_min, x_max, y_max).
    """
    cx, cy, w, h = x
    half_w = 0.5 * w
    half_h = 0.5 * h
    return (cx - half_w, cy - half_h, cx + half_w, cy + half_h)


def xyxy2xywh(x):
    """
    Convert an (x_min, y_min, x_max, y_max) box to
    (center_x, center_y, width, height).
    """
    x0, y0, x1, y1 = x
    center_x = (x0 + x1) / 2.0
    center_y = (y0 + y1) / 2.0
    return (center_x, center_y, x1 - x0, y1 - y0)


def monitor_queues(queues: list):
    """
    Supervise queue sizes forever, clearing any queue that reaches the
    MAX_QUEUE_SIZE threshold (env var). Intended to run as a daemon process.

    :param queues: list of (name, queue) pairs
    """
    # Hoisted out of the loop: re-parsing the env var every iteration was
    # wasted work — the threshold does not change at runtime.
    max_size = int(os.environ.get("MAX_QUEUE_SIZE"))
    while True:
        for name, q in queues:
            size = q.qsize()
            if size >= max_size:
                logger.info(f"[monitor]---->{name}队列:{size}， 执行清理")
                # NOTE(review): multiprocessing.Queue has no clear(); this
                # assumes a queue type that provides one — confirm.
                q.clear()
            else:
                logger.info(f"[monitor]---->{name}队列大小:{size}")
        # Inspect and clean the queues once every 100 seconds.
        time.sleep(100)


def print_queues(queues: list):
    """
    Log the size of every queue forever, once per reporting interval.

    :param queues: list of (name, queue) pairs
    """
    while True:
        for name, q in queues:
            logger.info(f"[monitor]---->{name}队列大小:{q.qsize()}")
        # Report queue sizes once every 100 seconds.
        time.sleep(100)


def add_process(name=None, func=None, args=None):
    """
    Spawn a daemon Process running func(*args) and register it under the
    next id `n` so monitor_processes() can restart it if it dies.

    :param name: display name used in monitor log lines
    :param func: target callable for the child process
    :param args: positional-argument tuple for func (may be omitted)
    """
    global processes
    global n
    global target_dict
    global args_dict
    # Bug fix: Process(args=None) raises TypeError, so the default made
    # add_process(name, func) unusable. Treat "no args" as an empty tuple.
    if args is None:
        args = ()
    p = Process(target=func, args=args)
    p.daemon = True
    p.start()
    processes[n] = p
    target_dict[n] = func
    args_dict[n] = args
    process_names[n] = name
    n = n + 1


def monitor_processes():
    """
    Watchdog loop: every 10 seconds, check every registered child process
    and restart any that has died, re-using its recorded target and args.
    Returns only if the registry is empty at the start.
    """
    global processes, target_dict, args_dict
    while len(processes) > 0:
        time.sleep(10)
        try:
            for i in processes.keys():
                p = processes[i]
                if p.is_alive():
                    logger.info(f"[monitor]---->进程[{process_names[i]}]：进程正常运行中")
                    continue
                # Make sure the dead process is fully gone before restarting.
                try:
                    os.kill(p.pid, signal.SIGTERM)
                    cmd = 'kill -9 %d' % (int(p.pid))
                    os.system(cmd)
                    time.sleep(10)
                except OSError:
                    # Process already reaped — nothing left to kill.
                    pass
                newp = Process(target=target_dict[i], args=args_dict[i])
                newp.daemon = True
                newp.start()
                processes[i] = newp
                logger.info(f"[monitor]---->进程[{process_names[i]}]：重启成功")
        except Exception:
            # Bug fix: was a bare silent `except: pass`; keep the watchdog
            # alive but record what actually went wrong.
            logger.exception("[monitor]---->watchdog iteration failed")


def decode_dis(video_process_dict: dict, q_decode: Queue, q_decode_list: list):
    """
    Fan-out loop: pull each item off q_decode forever and route it to the
    per-video worker queue selected by its "video_id" field.

    :param video_process_dict: maps video_id -> index into q_decode_list
    :param q_decode: shared input queue of decoded items
    :param q_decode_list: per-worker output queues
    """
    while True:
        item = q_decode.get()
        target_index = video_process_dict[item["video_id"]]
        q_decode_list[target_index].put(item)


def join_jobs():
    """
    Block until every registered child process has terminated.
    """
    # Iterating an empty registry is a no-op, so no length guard is needed.
    for proc in processes.values():
        proc.join()