import json
import os
import queue
import threading
import time
from datetime import datetime, timedelta
from kafka import KafkaConsumer, KafkaProducer
from config import ConfigData
import concurrent.futures


def get_immediate_folders(directory):
    """Return the names of the immediate (non-recursive) subdirectories.

    :param directory: path of the directory to scan
    :return: list of subdirectory names directly under ``directory``
    :raises FileNotFoundError: if ``directory`` does not exist
    """
    # os.scandir is a context manager; closing it promptly releases the
    # underlying OS directory handle instead of waiting for GC.
    with os.scandir(directory) as entries:
        return [entry.name for entry in entries if entry.is_dir()]


def get_immediate_json_files(directory):
    """Return the names of the JSON files directly inside a directory.

    Non-recursive: subdirectories are not descended into.

    :param directory: path of the directory to scan
    :return: list of file names ending in ``.json`` directly under ``directory``
    :raises FileNotFoundError: if ``directory`` does not exist
    """
    # Close the scandir iterator promptly to release the OS handle.
    with os.scandir(directory) as entries:
        return [
            entry.name
            for entry in entries
            if entry.is_file() and entry.name.endswith('.json')
        ]


def read_json_data(jsonl_file_path):
    """Read every record from a JSON Lines file.

    Lines that fail to parse are reported and skipped; a missing file is
    reported and yields an empty result.

    :param jsonl_file_path: path to the JSON Lines file
    :return: list of parsed JSON objects (possibly empty)
    """
    records = []
    try:
        with open(jsonl_file_path, 'r', encoding='utf-8') as fh:
            for line in fh:
                try:
                    records.append(json.loads(line.strip()))
                except json.JSONDecodeError:
                    print(f"解析行 {line} 时出错。")
    except FileNotFoundError:
        print(f"文件 {jsonl_file_path} 未找到。")
    return records


def filter_data_by_time(data, start_time_str, end_time_str, time_format="%Y-%m-%d %H:%M:%S"):
    """Select the records whose ``timestamp`` falls inside a time range.

    Note: ``time_format`` applies only to the range bounds; each record's
    ``timestamp`` field is always parsed with the fixed ISO-like format
    ``"%Y-%m-%dT%H:%M:%S.%f"``. Records without a (truthy) ``timestamp``
    are skipped.

    :param data: list of JSON records (dicts)
    :param start_time_str: inclusive range start, formatted per ``time_format``
    :param end_time_str: inclusive range end, formatted per ``time_format``
    :param time_format: format of the two bound strings,
        default ``"%Y-%m-%d %H:%M:%S"``
    :return: tuple ``(filtered_data, last_time)`` — the matching records and
        the latest matching timestamp (``start_time`` if nothing matched)
    :raises ValueError: if a bound string or a record timestamp is malformed
    """
    start_time = datetime.strptime(start_time_str, time_format)
    end_time = datetime.strptime(end_time_str, time_format)
    last_time = start_time
    filtered_data = []
    for item in data:
        time_str = item.get("timestamp")
        if not time_str:
            continue
        item_time = datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%S.%f")
        if start_time <= item_time <= end_time:
            filtered_data.append(item)
            if item_time > last_time:
                last_time = item_time
    return filtered_data, last_time


class OfflineData:
    """Replays locally stored event data back onto Kafka on demand.

    A background thread listens on the monitor topic for replay requests.
    ``run()`` serves those requests: it loads the requested folder's
    ``event_data.json``, filters each record's ``snList`` down to a fixed
    sensor whitelist, and publishes the result to the history topic.
    """

    # Sensors whose entries are forwarded; all others are dropped.
    SN_WHITELIST = frozenset(
        ['TLS_1_1', 'TLS_1_2', 'TLS_2_1', 'TLS_2_2', 'HW7', 'ZW3', 'HW3', 'ZW4'])

    def __init__(self, target_directory):
        """
        :param target_directory: root directory containing one folder per
            replayable data set (each holding an ``event_data.json``)
        """
        self.kafka_host = ConfigData["kafka_host"]
        # Backward-compatible alias for the original misspelled attribute.
        self.kafak_host = self.kafka_host
        self.kafka_topic = ConfigData["kafka_monitor_topic"]
        self.kafka_send_topic = ConfigData["kafka_event_index_history_topic"]
        self.gantry_info = ConfigData["gantry_info"]
        self.kafka_consumer = KafkaConsumer(
            self.kafka_topic,
            bootstrap_servers=self.kafka_host,
            auto_offset_reset='latest',
            enable_auto_commit=False
        )
        self.kafka_producer = KafkaProducer(
            bootstrap_servers=self.kafka_host,
            key_serializer=str.encode,
            value_serializer=lambda x: json.dumps(x).encode('utf-8')
        )
        self.thread_flag = True
        self.q = queue.Queue(maxsize=10)
        # Fix: remember the directory on the instance instead of relying on a
        # module-level global inside run().
        self.target_directory = target_directory
        # Receive replay requests in real time. Daemon thread so the process
        # can still exit even though the consumer loop runs indefinitely.
        threading.Thread(target=self.subscribe_data_kafka, daemon=True).start()

        self.folder_names = get_immediate_folders(target_directory)

        self.all_files_data = []  # cached records of the currently loaded file
        self.status = None
        self.fileName = None      # name of the currently cached file

        self.current_time = None

    def subscribe_data_kafka(self):
        """Background loop: pull replay requests from Kafka into the queue."""
        while self.thread_flag:
            # Use poll() instead of iterating the consumer: iteration blocks
            # indefinitely, which made the thread_flag shutdown check
            # unreachable in the original implementation.
            records = self.kafka_consumer.poll(timeout_ms=100)
            for messages in records.values():
                for message in messages:
                    try:
                        time_info = json.loads(message.value.decode('utf-8'))
                        print("接收数据", time_info)
                        # block=False: drop the request if the queue is full.
                        self.q.put(time_info, block=False)
                    except Exception:
                        # Undecodable payload, bad JSON, or full queue.
                        print("解析数据失败！！！")

    def run(self):
        """Main loop: serve queued replay requests forever (never returns)."""
        while True:
            time.sleep(1.0)
            while not self.q.empty():
                print(time.time())
                message = self.q.get(timeout=0.1)
                print(message)
                if not message:
                    continue
                # Only handle offline-file requests carrying both fields;
                # .get avoids a KeyError on malformed requests.
                if (message.get("dataResource") != 1
                        or "fileName" not in message
                        or "status" not in message):
                    continue
                file_name = message["fileName"]
                if self.fileName != file_name:
                    # A different file was requested: (re)load and cache it.
                    self.fileName = file_name
                    total_file_name = os.path.join(
                        self.target_directory, file_name, "event_data.json")
                    self.all_files_data = read_json_data(total_file_name)
                if self.all_files_data and message["status"] == 0:
                    print("开始发送数据", time.time())
                    for item in self.all_files_data:
                        # Keep only whitelisted sensors in each record.
                        snlist = [sn for sn in item.get("snList", [])
                                  if sn["sn"] in self.SN_WHITELIST]
                        new_item = {"timeStamp": item["timeStamp"], "snList": snlist}
                        self.kafka_producer.send(
                            self.kafka_send_topic, value=new_item, key="key")
                    print("发送完成", time.time())


if __name__ == '__main__':
    target_directory = '/data/project/eventCheck/file/localFile'  # current directory; replace with the actual directory path
    offline_data = OfflineData(target_directory)
    # NOTE: run() loops forever serving replay requests; it never returns.
    offline_data.run()