import json
import os
import sys
import threading
import time
from loguru import logger
from kafka import KafkaProducer

# Resolve the log directory relative to this file.
# Renamed from `dir` to avoid shadowing the `dir` builtin.
base_dir = os.path.dirname(os.path.abspath(__file__))
log_dir = os.path.join(base_dir, "Log", "logs")
# exist_ok=True avoids the race between an exists() check and makedirs().
os.makedirs(log_dir, exist_ok=True)

# Custom log format
logger.remove()  # remove the default log handler
logger.add(sink=sys.stdout, format="{time:YYYY-MM-DD HH:mm:ss.SSS} | {level} | {message}", level="INFO")
# logger.add(
#     sink=os.path.join(log_dir, "clean_{time:YYYY-MM-DD}.log"),  # log file name carries the date
#     format="{time:YYYY-MM-DD HH:mm:ss.SSS} | {level} | {function}:{line} - {message}",
#     rotation="00:00",           # rotate at midnight (choose either this or size-based rotation)
#     retention="360 days",       # keep the most recent 360 days of logs
#     encoding="utf-8",           # avoid garbled non-ASCII characters
#     level="INFO",               # file log level
# )


def kafka_send(bootstrap_servers, topic, json_file_path):
    """Replay newline-delimited JSON messages from a file onto a Kafka topic.

    Each input line is expected to be a JSON object with at least:
      - "timestamp": original event time in epoch milliseconds
      - "value": the payload to publish

    The first message's timestamp fixes an offset between the recording
    time and "now"; each subsequent message is delayed so the original
    inter-message pacing is reproduced.

    Args:
        bootstrap_servers: Kafka bootstrap server address(es), e.g. "host:9092".
        topic: Target Kafka topic name.
        json_file_path: Path to the newline-delimited JSON file to replay.
    """
    producer = KafkaProducer(
        bootstrap_servers=bootstrap_servers,
        value_serializer=lambda x: json.dumps(x).encode('utf-8')
    )

    # Use None as the "not yet computed" sentinel: an offset of exactly 0 ms
    # is a legitimate value, and the previous `time_diff == 0` check would
    # have recomputed the offset on every message in that case.
    time_diff = None
    messages_sent = 0

    try:
        with open(json_file_path, 'r', encoding='utf-8') as f:
            for line in f:
                try:
                    msg = json.loads(line.strip())
                    if time_diff is None:
                        time_diff = int(time.time() * 1000) - int(msg["timestamp"])

                    # Fire-and-forget send; calling .get() on the returned
                    # future would wait for the broker ack (lower throughput).
                    producer.send(topic, value=msg["value"])

                    messages_sent += 1

                    # Sleep until this message's shifted timestamp is due,
                    # preserving the recording's original pacing.
                    # logger.info replaces a bare print() for consistency
                    # with the rest of the file's logging.
                    time_delay = (int(msg["timestamp"]) + time_diff) - int(time.time() * 1000)
                    logger.info(f"时间延迟: {topic},{time_delay} ms, {int(time.time() * 1000)}, {int(msg['timestamp'])}, {time_diff}")
                    if time_delay > 0:
                        time.sleep(time_delay / 1000)

                except json.JSONDecodeError as e:
                    logger.error(f"JSON解析错误: {e}, 行内容: {line[:50]}...")
                except Exception as e:
                    # Broad catch is deliberate: one bad record must not
                    # abort the whole replay.
                    logger.error(f"发送消息时发生错误: {e}")

    except FileNotFoundError:
        logger.error(f"文件未找到: {json_file_path}")
    except Exception as e:
        logger.error(f"处理文件时发生未知错误: {e}")
    finally:
        # close() flushes buffered messages before releasing resources;
        # the timeout bounds how long we wait for in-flight sends.
        producer.close(timeout=10)
        logger.info(f"生产者已关闭，主题: {topic}, 发送消息总数: {messages_sent}")


if __name__ == '__main__':
    # Broker address can be overridden via the environment without editing
    # the script; the default preserves the original hard-coded address.
    bootstrap_servers = os.environ.get("KAFKA_BOOTSTRAP_SERVERS", "10.100.12.72:9092")
    topics_and_files = [
        ("e1_data_nq01", r"./temp_data/data/e1_data_nq01_1753351200000_1753401600000_processed.json"),
        # ("e1_data_na02", r"./temp_data/data/e1_data_na02_1753225200000_1753254000000_processed.json"),
        ("kako_data", r"./temp_data/data/kako_data_1753351200000_1753401600000_processed.json"),
        ("submitE3Frame", r"./temp_data/data/submitE3Frame_1753351200000_1753401600000_processed.json")
    ]

    threads = []

    # Create and start one replay thread per (topic, file) pair.
    for topic, json_file in topics_and_files:
        thread = threading.Thread(target=kafka_send, args=(bootstrap_servers, topic, json_file))
        threads.append(thread)
        thread.start()
        logger.info(f"启动线程: {topic}")
        time.sleep(0.5)  # simple stagger between starts to reduce initial load

    # Wait for all replay threads to finish.
    for thread in threads:
        thread.join()

    logger.info("所有线程已完成")