package org.jeecg.modules.iot.mqtt.server;

import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.eclipse.paho.client.mqttv3.IMqttDeliveryToken;
import org.eclipse.paho.client.mqttv3.MqttCallbackExtended;
import org.eclipse.paho.client.mqttv3.MqttMessage;
import org.jeecg.modules.iot.mqtt.config.MqttProperties;
import org.jeecg.modules.iot.mqtt.entity.MqttMessageJson;
import org.jeecg.modules.iot.mqtt.server.service.MessageHandlerService;
import org.jeecg.modules.iot.mqtt.server.service.RecvMessageHandlerService;
import org.jeecg.modules.iot.mqtt.server.service.processor.cabinet.CabinetProcessor;
import org.jeecg.modules.iot.mqtt.server.service.processor.socketOperate.ChargePileProcessor;
import org.jeecg.modules.iot.utils.JsonUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.stereotype.Component;
import org.springframework.util.concurrent.ListenableFuture;
import org.springframework.util.concurrent.ListenableFutureCallback;

import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.ReentrantLock;
import java.util.regex.Pattern;

@Slf4j
@Component
public class MqttKafKaConnectorAcceptCallback implements MqttCallbackExtended {

    private static final Logger logger = LoggerFactory.getLogger(MqttKafKaConnectorAcceptCallback.class);

    /**
     * Producers created via {@link #createKafkaProducer(String)}, keyed by logical name.
     * Entries are closed in {@link #close()}.
     */
    private final Map<String, KafkaProducer<String, String>> kafkaProducers = new HashMap<>();

    @Value("${spring.kafka.bootstrap-servers}")
    private String kafkaBootstrapServers;

    @Autowired
    private MqttProperties mqttProperties;

    @Autowired
    private MqttAcceptClient mqttAcceptClient;

    @Autowired
    private ChargePileProcessor chargePileProcessor;

    @Autowired
    private CabinetProcessor cabinetProcessor;

    @Autowired
    private MessageHandlerService messageHandlerService;

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate; // primary Kafka send path

    @Autowired
    private RecvMessageHandlerService recvMessageHandlerService;

    private final ObjectMapper objectMapper = new ObjectMapper();

    /** Kafka topic for device online/offline status messages. */
    private final String deviceTopic = "device-status-topic";

    /** Unbounded cached pool used to process arriving MQTT messages asynchronously. */
    private ExecutorService executorService = Executors.newCachedThreadPool();

    /** Guards all reads/writes of {@link #reconnectFuture} (callback thread vs. scheduler thread). */
    private final ReentrantLock lock = new ReentrantLock();

    /** Compiled from mqttProperties.getSendTopic() in {@link #init()} (MQTT wildcards -> regex). */
    private Pattern sendTopicPattern;
    /** Compiled from mqttProperties.getDeviceStatus() in {@link #init()}. */
    private Pattern deviceStatusPattern;
    private ScheduledFuture<?> reconnectFuture;
    private static final int MAX_RECONNECT_ATTEMPTS = 10;

    private final AtomicInteger reconnectAttempts = new AtomicInteger(0);
    private final ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();

    /**
     * Initializes the default Kafka producer and pre-compiles topic-matching patterns.
     * The producer is registered in {@link #kafkaProducers} so it is closed on shutdown
     * (previously the return value was discarded and the producer leaked).
     */
    @PostConstruct
    public void init() {
        kafkaProducers.put("default", createKafkaProducer("default"));

        // Translate MQTT wildcards into a regex: '+' matches one level, '#' matches the rest.
        String sendTopic = mqttProperties.getSendTopic();
        String sendTopicRegex = sendTopic.replace("+", "[^/]+").replace("#", ".+");
        sendTopicPattern = Pattern.compile(sendTopicRegex);
        deviceStatusPattern = Pattern.compile(mqttProperties.getDeviceStatus());
    }

    /**
     * Called by Paho when the MQTT connection drops. Schedules a fixed-rate reconnect
     * task (every 5s, at most {@link #MAX_RECONNECT_ATTEMPTS} attempts). Access to
     * {@link #reconnectFuture} is serialized with {@link #lock}.
     *
     * @param cause reason the connection was lost
     */
    @Override
    public void connectionLost(Throwable cause) {
        log.error("MQTT 连接断开，原因：{}", cause.getMessage(), cause);
        lock.lock();
        try {
            if (reconnectFuture == null || reconnectFuture.isDone()) {
                reconnectFuture = scheduler.scheduleAtFixedRate(this::attemptReconnect, 0, 5, TimeUnit.SECONDS);
            }
        } finally {
            lock.unlock();
        }
    }

    /** One reconnect attempt; cancels the task once connected or when attempts are exhausted. */
    private void attemptReconnect() {
        lock.lock();
        try {
            if (reconnectAttempts.get() >= MAX_RECONNECT_ATTEMPTS) {
                log.error("超过最大重连次数，停止重连");
                cancelReconnectTask();
                return;
            }
            if (MqttAcceptClient.client.isConnected()) {
                reconnectAttempts.set(0); // reset counter for the next outage
                cancelReconnectTask();
                log.info("MQTT 连接已恢复");
                return;
            }
            log.info("尝试重新连接 MQTT Broker...，第 {} 次尝试", reconnectAttempts.incrementAndGet());
            mqttAcceptClient.reconnection();
        } finally {
            lock.unlock();
        }
    }

    /** Cancels and clears the scheduled reconnect task. Caller must hold {@link #lock}. */
    private void cancelReconnectTask() {
        if (reconnectFuture != null) {
            reconnectFuture.cancel(false);
            reconnectFuture = null;
        }
    }

    /**
     * Dispatches an arriving MQTT message to Kafka on a worker thread.
     * Messages on the send-topic pattern are parsed as 808 JSON and routed by cmd;
     * device-status messages go to {@link #deviceTopic} unparsed.
     *
     * @param topic       MQTT topic the message arrived on
     * @param mqttMessage raw MQTT message (payload decoded as UTF-8)
     */
    @Override
    public void messageArrived(String topic, MqttMessage mqttMessage) throws Exception {
        executorService.submit(() -> {
            String payload = new String(mqttMessage.getPayload(), StandardCharsets.UTF_8);
            try {
                if (sendTopicPattern.matcher(topic).matches()) {
                    // Only forward well-formed 808 JSON; anything else is dropped silently.
                    if (JsonUtil.is808Json(payload)) {
                        MqttMessageJson mqttMessageJson = objectMapper.readValue(payload, MqttMessageJson.class);
                        String kafkaTopic = determineKafkaTopic(mqttMessageJson);
                        log.info("分发主题: {} for 终端ID/kafka Key: {}  ", kafkaTopic, mqttMessageJson.getId());
                        sendKafkaMessage(kafkaTopic, mqttMessageJson.getId(), payload);
                    }
                } else if (deviceStatusPattern.matcher(topic).matches()) {
                    sendKafkaMessage(deviceTopic, null, payload);
                }
            } catch (Exception e) {
                // Keep consuming even if one message fails; log topic + payload for diagnosis.
                log.error("处理 MQTT 消息失败，topic: {}, payload: {}", topic, payload, e);
            }
        });
    }

    /**
     * Tests an MQTT topic against a wildcard subscription pattern
     * ('+' = single level, '#' = remaining levels).
     *
     * @param topic         concrete topic, e.g. {@code a/b/c}
     * @param wildcardTopic subscription pattern, e.g. {@code a/+/#}
     * @return true when the topic matches the pattern
     */
    public static boolean isTopicMatched(String topic, String wildcardTopic) {
        String regex = wildcardTopic.replace("+", "[^/]+").replace("#", ".+");
        return Pattern.matches(regex, topic);
    }

    /**
     * Sends a record through {@link #kafkaTemplate}, substituting a default key
     * when none is supplied, and logs send failures (previously swallowed).
     *
     * @param topic   destination Kafka topic
     * @param key     partitioning key; may be null/empty
     * @param payload record value
     */
    public void sendKafkaMessage(String topic, String key, String payload) {
        String defaultKey = "default_key";
        String actualKey = (key != null && !key.isEmpty()) ? key : defaultKey;
        ListenableFuture<SendResult<String, String>> future = kafkaTemplate.send(topic, actualKey, payload);
        future.addCallback(new ListenableFutureCallback<SendResult<String, String>>() {
            @Override
            public void onSuccess(SendResult<String, String> sendResult) {
                // Success is the common case; keep the hot path quiet.
            }

            @Override
            public void onFailure(Throwable ex) {
                log.error("Kafka 消息发送失败，topic: {}, key: {}", topic, actualKey, ex);
            }
        });
    }

    /**
     * @deprecated duplicate of {@link #isTopicMatched(String, String)}; kept for
     * backward compatibility and now delegates to it.
     */
    @Deprecated
    public static boolean isTopicMatchedWithWildcard(String topic, String wildcardTopic) {
        return isTopicMatched(topic, wildcardTopic);
    }

    /**
     * Builds a low-latency String/String Kafka producer (no batching, snappy
     * compression, acks=1, 3 retries, 64MB buffer).
     *
     * @param topic logical name of the producer (used only as a registry key by callers)
     * @return a new producer; the caller is responsible for closing it
     */
    private KafkaProducer<String, String> createKafkaProducer(String topic) {
        Properties kafkaProps = new Properties();
        kafkaProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaBootstrapServers);
        kafkaProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        kafkaProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        // Latency over throughput: send each record immediately, no batching.
        // (For throughput, raise LINGER_MS / BATCH_SIZE instead.)
        kafkaProps.put(ProducerConfig.LINGER_MS_CONFIG, 0);
        kafkaProps.put(ProducerConfig.BATCH_SIZE_CONFIG, 0);
        kafkaProps.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 67108864); // 64MB send buffer
        kafkaProps.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, "snappy");
        kafkaProps.put(ProducerConfig.ACKS_CONFIG, "1"); // leader-only ack
        kafkaProps.put(ProducerConfig.RETRIES_CONFIG, 3);

        return new KafkaProducer<>(kafkaProps);
    }

    /**
     * Maps an 808 message's cmd code to its Kafka topic.
     * Unknown or missing cmd values fall through to {@code default-topic}
     * (previously a null cmd caused an NPE in the switch).
     *
     * @param mqttMessageJson parsed 808 message
     * @return destination Kafka topic, never null
     */
    private String determineKafkaTopic(MqttMessageJson mqttMessageJson) {
        String cmd = mqttMessageJson.getCmd();
        if (cmd == null) {
            return "default-topic";
        }
        switch (cmd) {
            case "0200":
                return "location-topic-can"; // location report
            case "0100":
                return "login-topic";        // terminal registration
            case "0900":
                return "transparent-topic";  // transparent data
            case "0107":
                return "device-topic";       // device attributes
            default:
                return "default-topic";
        }
    }

    /**
     * Called by Paho on (re)connect; (re)subscribes all configured topics so
     * subscriptions survive reconnects.
     *
     * @param reconnect true when this is an automatic reconnect
     * @param serverURI broker URI that was connected to
     */
    @Override
    public void connectComplete(boolean reconnect, String serverURI) {
        mqttAcceptClient.subscribe(mqttProperties.getDefaultTopic(), 0);
        mqttAcceptClient.subscribe(mqttProperties.getRecvTopic(), 1);
        mqttAcceptClient.subscribe(mqttProperties.getSendTopic(), 1);
        mqttAcceptClient.subscribe(mqttProperties.getZnczTopic(), 1);
        mqttAcceptClient.subscribe(mqttProperties.getCabinetTopic(), 1);
        mqttAcceptClient.subscribe(mqttProperties.getDeviceStatus(), 1);
    }

    /**
     * Called by Paho when an outbound publish completes; logs topic(s) and payload.
     * Note: after delivery the token's message may already be cleared by the client.
     */
    @Override
    public void deliveryComplete(IMqttDeliveryToken token) {
        String[] topics = token.getTopics();
        if (topics != null) {
            for (String topic : topics) {
                logger.info("向主题【" + topic + "】发送消息成功！");
            }
        }
        try {
            MqttMessage message = token.getMessage();
            if (message != null) {
                String s = new String(message.getPayload(), StandardCharsets.UTF_8);
                logger.info("【消息内容】:" + s);
            }
        } catch (Exception e) {
            // Pass the throwable so SLF4J records the full stack trace.
            logger.error("MqttAcceptCallback deliveryComplete error,message:{}", e.getMessage(), e);
        }
    }

    /**
     * Releases resources on context shutdown: stops the worker pool and the
     * reconnect scheduler (previously leaked), then closes all Kafka producers.
     */
    @PreDestroy
    public void close() {
        executorService.shutdown();
        scheduler.shutdownNow();
        kafkaProducers.values().forEach(KafkaProducer::close);
    }

    /**
     * Extracts the outermost {@code {...}} span from a raw payload.
     *
     * @param input raw payload, possibly with framing around the JSON; may be null
     * @return the JSON substring, or null when no brace-delimited span is found
     */
    public static String extractJsonData(String input) {
        if (input == null) {
            return null;
        }
        int startIndex = input.indexOf('{');
        int endIndex = input.lastIndexOf('}');
        // Explicit bounds check instead of relying on substring() throwing.
        if (startIndex < 0 || endIndex < startIndex) {
            log.warn("Error extracting JSON data: no brace-delimited JSON found");
            return null;
        }
        return input.substring(startIndex, endIndex + 1);
    }
}