import multiprocessing
import os
import json
import queue
import sys
import time
import threading
from loguru import logger
from datetime import datetime, timedelta
from kafka import KafkaConsumer, KafkaProducer
from kafka.errors import NoBrokersAvailable, KafkaError
from process import RadarData

# Resolve the log directory relative to this file: <parent-of-script-dir>/log/logs.
# (Renamed from `dir`, which shadowed the builtin.)
_script_dir = os.path.dirname(os.path.abspath(__file__))
log_dir = os.path.join(os.path.dirname(_script_dir), "log", "logs")
# exist_ok avoids the check-then-create race of the original exists()/makedirs() pair.
os.makedirs(log_dir, exist_ok=True)
# Custom log format
logger.remove()  # drop loguru's default stderr handler
logger.add(
    sink=os.path.join(log_dir, "main_{time:YYYY-MM-DD}.log"),  # dated log file name
    format="{time:YYYY-MM-DD HH:mm:ss.SSS} | {level} | {function}:{line} - {message}",
    rotation="00:00",  # rotate at midnight every day
    retention="360 days",  # keep the most recent 360 days of logs
    encoding="utf-8",  # avoid mojibake for non-ASCII (Chinese) text
    level="INFO",  # file log level
)


class DataMonitor:
    """Bridge radar vehicle data from Kafka into the congestion detector.

    Three cooperating loops:
      * ``subscribe_data_kafka`` (thread)  -- consumes the cars topic into
        ``data_queue``.
      * ``process_radar_data`` (thread)    -- feeds ``data_queue`` through
        ``process.RadarData``, which emits results into ``congestion_queue``.
      * ``get_data`` (caller's thread)     -- publishes a heartbeat every
        second and forwards congestion events to the event topic.
    """

    def __init__(self, config, task_name):
        # Raw config plus the sub-sections the worker loops need.
        self.config = config
        self.config_data = config["ConfigData"]
        self.device_info = config["DeviceInfo"]
        self.speed_limit = config["SpeedLimit"]
        self.density_limit = config["DensityLimit"]
        self.flow_limit = config["FlowLimit"]
        self.task_name = task_name
        # Bounded hand-off queues between consumer / processor / publisher.
        self.data_queue = queue.Queue(maxsize=1000)
        self.congestion_queue = queue.Queue(maxsize=1000)
        # Per-device event bookkeeping: device_id -> {code, timestamp, begin_time}.
        self.code_dict = {}
        self.kafka_producer = None
        self.thread_flag = True
        # 1 = Kafka unavailable / not yet connected, 0 = healthy (shared between threads).
        self.kafka_status = 1

    def init(self):
        # Start the Kafka listener thread.  daemon=True so a crashed main
        # thread cannot leave the process hanging forever (the originals
        # were non-daemon).
        threading.Thread(target=self.subscribe_data_kafka, daemon=True).start()
        # Offline file-replay thread (debugging only):
        # threading.Thread(target=self.open_json, args=(path,)).start()
        # Start the data-processing thread.
        threading.Thread(target=self.process_radar_data, daemon=True).start()

    def open_json(self, path):
        """Replay a line-delimited JSON capture file into ``data_queue``.

        Offline substitute for ``subscribe_data_kafka``; blocks when the
        queue is full and throttles to ~10 messages/s.
        """
        with open(path, "r", encoding="utf-8") as file:
            for line in file:
                message = json.loads(line.strip())
                self.data_queue.put(message, block=True)
                time.sleep(0.1)

        # Signal downstream threads that the replay has finished.
        self.thread_flag = False
        print(path, "over!!!")

    def subscribe_data_kafka(self):
        """Consume the cars topic forever, rebuilding the consumer on failure.

        Starts at the latest offset (live data only) and never commits
        offsets.  Each message is JSON-decoded and enqueued; a full queue
        drops its oldest entry so the stream stays current.
        """
        topic = self.config_data["kafka_cars_topic"]
        kafka_consumer = None
        while True:
            time.sleep(0.002)
            if kafka_consumer is None:
                try:
                    kafka_consumer = KafkaConsumer(
                        topic,
                        bootstrap_servers=self.config_data["kafka_host"],
                        auto_offset_reset='latest',
                        enable_auto_commit=False
                    )
                    self.kafka_status = 0
                    logger.info(f"下行门架数据监听启动, topic:{topic}")
                except NoBrokersAvailable:
                    logger.error(f"kafka连接失败, topic:{topic}")
                    kafka_consumer = None
                    time.sleep(10)
                    continue

            while True:
                time.sleep(0.0002)
                # The producer side flips kafka_status to 1 when it detects a
                # broken connection; rebuild the consumer as well.
                if self.kafka_status == 1:
                    logger.error("kafka连接断开")
                    kafka_consumer = None
                    time.sleep(10)
                    break
                try:
                    records = kafka_consumer.poll(timeout_ms=1000)
                    if records:
                        for tp, messages in records.items():
                            for message in messages:
                                if message:
                                    message_data = json.loads(message.value.decode('utf-8'))
                                    self._enqueue(message_data)
                except KafkaError:
                    logger.error(f"kafka连接断开, topic:{topic}")
                    kafka_consumer = None
                    time.sleep(10)
                    break
                except Exception as e:
                    logger.opt(exception=e).error(f"解析数据失败, topic:{topic}")

    def _enqueue(self, message_data):
        """Put one message on ``data_queue``, dropping the oldest entry on overflow.

        BUG FIX: the original caught ``queue.Full`` around the *entire* poll
        loop, so one full-queue put discarded the current message and aborted
        the rest of the batch.  Handling Full per message loses at most one
        stale head item.
        """
        try:
            self.data_queue.put(message_data, block=False)
        except queue.Full:
            logger.warning("队列已满，丢弃队头数据")
            try:
                self.data_queue.get(block=False)  # make room by dropping the head
            except queue.Empty:
                pass  # another thread drained it first
            try:
                self.data_queue.put(message_data, block=False)
            except queue.Full:
                pass  # raced with another producer; drop this message

    def process_radar_data(self):
        """Run the congestion detector over the incoming stream (blocking).

        speed_flag=True: use the radar's reported speed directly instead of
        deriving speed from position/time deltas.
        """
        radar_data = RadarData(self.device_info, self.speed_limit, self.density_limit,
                               self.flow_limit, self.task_name, speed_flag=True)
        radar_data.run(self.data_queue, self.congestion_queue)

    def get_code(self, deveice_id):
        """Return the active event code for a device, creating/refreshing it.

        The code is minute-resolution time + device id, e.g. "202506040859<id>".
        A code stays active while refreshes arrive within 120 s of each other;
        a longer gap starts a new event (fresh code and begin_time).

        NOTE(review): parameter name ``deveice_id`` is a typo kept for
        backward compatibility with keyword callers.
        """
        now_time = datetime.now()
        time_str = now_time.strftime("%Y%m%d%H%M")
        # Candidate code, used only when a new event starts.
        new_code = f"{time_str}{deveice_id}"
        # Second-resolution (float) timestamp.
        timestamp = now_time.timestamp()
        entry = self.code_dict.get(deveice_id)
        if entry is None:
            self.code_dict[deveice_id] = {"code": new_code, "timestamp": timestamp, "begin_time": timestamp}
        elif entry["timestamp"] + 120 < timestamp:
            # Gap longer than 120 s: treat as a brand-new event.
            entry["code"] = new_code
            entry["begin_time"] = timestamp
            entry["timestamp"] = timestamp
        else:
            # Ongoing event: just record the refresh time.
            entry["timestamp"] = timestamp
        return self.code_dict[deveice_id]["code"]

    def get_data(self):
        """Main publishing loop (never returns).

        Sends a heartbeat to the status topic once per second; in between,
        drains ``congestion_queue`` and publishes first-report congestion
        events to the event topic.  Rebuilds the producer on Kafka errors.
        """
        last_time = time.time()
        while True:
            time.sleep(0.01)
            try:
                # (Re)connect the producer if needed.
                if self.kafka_producer is None:
                    logger.info(f"add_message {self.kafka_producer}")
                    try:
                        self.kafka_producer = KafkaProducer(
                            bootstrap_servers=self.config_data["kafka_host"],
                            key_serializer=lambda x: str(x).encode('utf-8'),
                            request_timeout_ms=10000,
                            value_serializer=lambda x: json.dumps(x, ensure_ascii=False).encode('utf-8')
                        )
                        self.kafka_status = 0
                        logger.info(f"kafka_producer连接成功")
                    except NoBrokersAvailable:
                        self.kafka_status = 1
                        logger.error("kafka连接失败")
                        self.kafka_producer = None
                        time.sleep(10)
                        continue
                # Heartbeat once per second.
                if time.time() - last_time >= 1:
                    m = {
                        "timestamp": str(int(time.time())),
                        "status": 0,
                        "type": 1
                    }
                    try:
                        future = self.kafka_producer.send(self.config_data["kafka_status"], value=m, key="key")
                        future.get(timeout=10)  # normally returns quickly
                    except KafkaError:
                        self.kafka_status = 1
                        logger.error("kafka连接失败，导致算法心跳上报失败")
                        self.kafka_producer = None
                        time.sleep(10)
                    except Exception:
                        logger.error("算法心跳上报失败")
                    # Reset the heartbeat clock.
                    last_time = time.time()

                # Forward pending congestion events.
                while not self.congestion_queue.empty():
                    event_message = {}
                    message = self.congestion_queue.get(block=True)
                    print(f"时间：{int(time.time())}，设备 {message[0]} 的平均速度为{message[1]} km/h，密度为{message[2]}，小时流量为{message[3]}，事件等级为{message[4]}")
                    device_id = str(message[0])
                    seclevel = message[4]
                    # Side effect: creates/refreshes this device's entry in code_dict.
                    self.get_code(device_id)

                    begin_time = self.code_dict[device_id]["begin_time"] * 1000
                    refresh_time = self.code_dict[device_id]["timestamp"] * 1000
                    if begin_time == refresh_time:
                        # begin == refresh only right after a (re)start, i.e.
                        # the first report of this event.
                        event_message["timestamp"] = int(self.code_dict[device_id]["timestamp"])
                        event_message["globalCode"] = self.task_name
                        event_message["upCode"] = device_id
                        event_message["secLevel"] = seclevel

                    # BUG FIX: guard against a producer torn down by a heartbeat
                    # failure above; the original sent unconditionally and could
                    # raise AttributeError on None.
                    if event_message and self.kafka_producer is not None:
                        self.kafka_producer.send(self.config_data["kafka_event_topic"], value=event_message, key="key")
                        logger.info(f"event_message: {event_message}")
            except Exception as e:
                logger.opt(exception=e).error("数据处理异常")


def main(config_data, task_name):
    """Process entry point: build a DataMonitor, start its worker threads,
    then run the heartbeat/event publishing loop in this thread (blocks forever)."""
    monitor = DataMonitor(config_data, task_name)
    monitor.init()
    monitor.get_data()


if __name__ == "__main__":
    # Directory containing this script.
    script_dir = os.path.dirname(os.path.abspath(__file__))
    # config.json is expected to sit next to the script.
    config_file_path = os.path.join(script_dir, 'config.json')
    if os.path.exists(config_file_path):
        print(f"找到了配置文件: {config_file_path}")
    else:
        print("未找到 config.json 文件")
    data = None
    try:
        # Read and parse the JSON configuration.
        with open(config_file_path, 'r', encoding='utf-8') as file:
            data = json.load(file)
    except FileNotFoundError:
        print(f"Error: The file '{config_file_path}' was not found.")
    except json.JSONDecodeError:
        print("Error: The file contains invalid JSON.")
    except Exception as e:
        print(f"An unexpected error occurred: {e}")
    if data is None:
        # BUG FIX: the original fell through with `data` undefined after a
        # failed load and crashed later with NameError; exit explicitly.
        sys.exit(1)

    # kafka_cars_topic maps device id -> topic name; spawn one worker
    # process per device.
    topic_dict = data["ConfigData"]["kafka_cars_topic"]
    process_list = []
    # BUG FIX: the original iterated topic_dict.keys() while unpacking two
    # values, which raises ValueError; .items() yields the intended
    # (device, topic) pairs.
    for device, topic in topic_dict.items():
        # Each child process gets the config with its own topic string
        # (the dict is pickled per Process, so children don't interfere).
        data["ConfigData"]["kafka_cars_topic"] = topic
        process = multiprocessing.Process(target=main, args=(data, device,))
        process.start()
        process_list.append(process)
    for process in process_list:
        process.join()
