package com.kafka_java;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.TopicPartition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

public class Bothcandp {

    private static final Logger logger = LoggerFactory.getLogger(Bothcandp.class);
    // ObjectMapper is thread-safe and costly to construct; cache one instance.
    private static final ObjectMapper objectMapper = new ObjectMapper();

    /**
     * Bridges two Kafka topics: consumes OTLP-style log JSON from partition 0 of
     * topic "test", flattens each log record into "{scopeName} {body}" and forwards
     * it to topic "new". Messages that fail to parse are appended to
     * failed_message.json. Runs until the process is terminated or an unrecoverable
     * error escapes the poll loop.
     */
    public static void main(String[] args) {
        // Consumer configuration: manual commit so offsets advance only after a
        // whole batch has been handed to the producer.
        Properties consumerProps = new Properties();
        consumerProps.setProperty("bootstrap.servers", "8.130.34.5:32000");
        consumerProps.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        consumerProps.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        consumerProps.setProperty("enable.auto.commit", "false");
        consumerProps.setProperty("auto.offset.reset", "latest");
        consumerProps.setProperty("group.id", "test");

        // Producer configuration: acks=all + retries with max.in.flight=1 preserves
        // per-partition ordering even when a send is retried.
        Properties producerProps = new Properties();
        producerProps.setProperty("bootstrap.servers", "8.130.34.5:32000");
        producerProps.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        producerProps.setProperty("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        producerProps.setProperty(ProducerConfig.ACKS_CONFIG, "all");
        producerProps.setProperty(ProducerConfig.RETRIES_CONFIG, "3");
        producerProps.setProperty(ProducerConfig.LINGER_MS_CONFIG, "10");
        producerProps.setProperty(ProducerConfig.COMPRESSION_TYPE_CONFIG, "gzip");
        producerProps.setProperty(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, "1");

        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(consumerProps);
             KafkaProducer<String, String> producer = new KafkaProducer<>(producerProps)) {

            TopicPartition partition0 = new TopicPartition("test", 0);
            consumer.assign(Collections.singletonList(partition0));
            // NOTE(review): seeking to offset 0 replays the whole partition on every
            // start, which makes both auto.offset.reset and the commitSync() below
            // effectively irrelevant — confirm full replay is the intended behavior.
            consumer.seek(partition0, 0);

            if (!isKafkaConnected(consumer)) {
                throw new RuntimeException("Failed to connect to Kafka. Please check your connection settings.");
            }

            logger.info("Kafka consumer connected and ready to process messages.");

            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(500));
                if (!records.isEmpty()) {
                    for (ConsumerRecord<String, String> record : records) {
                        logger.debug("Received message with offset: {}, key: {}", record.offset(), record.key());
                        processRecord(producer, record);
                    }
                    // Commit only after the whole batch has been handed to the producer.
                    consumer.commitSync();
                }
            }
        } catch (Exception e) {
            logger.error("Exception in Kafka ConsumerProducer: {}", e.getMessage(), e);
        }
    }

    /**
     * Parses one consumed OTLP log envelope and forwards each contained log record
     * to topic "new", reusing the consumed record's key. Parse failures are logged
     * and the raw payload is preserved on disk.
     */
    private static void processRecord(KafkaProducer<String, String> producer, ConsumerRecord<String, String> record) {
        try {
            JsonNode jsonNode = objectMapper.readTree(record.value());
            // path(0) — unlike get(0) — yields a MissingNode instead of null when
            // "resourceLogs" is absent or empty, so a malformed envelope cannot
            // raise an NPE that would escape this catch and kill the poll loop.
            JsonNode scopeLogs = jsonNode.path("resourceLogs").path(0).path("scopeLogs");

            scopeLogs.forEach(scopeLog -> {
                String scopeName = scopeLog.path("scope").path("name").asText();

                scopeLog.path("logRecords").forEach(logRecord -> {
                    String body = logRecord.path("body").path("stringValue").asText();

                    String newMessage = scopeName + " " + body;
                    ProducerRecord<String, String> producerRecord =
                            new ProducerRecord<>("new", record.key(), newMessage);

                    producer.send(producerRecord, (metadata, exception) -> {
                        if (exception != null) {
                            // Pass the throwable itself so the stack trace is kept.
                            logger.error("Failed to send message to new topic", exception);
                        } else {
                            logger.info("Message sent to new: partition {}, offset {}", metadata.partition(), metadata.offset());
                        }
                    });
                });
            });

        } catch (IOException e) {
            logger.error("Error parsing JSON message", e);
            saveRawMessageToFile(record.value());
        }
    }

    /**
     * Probes broker reachability by listing topics with a 10s timeout.
     * Returns false (and logs the cause) instead of propagating the failure.
     */
    private static boolean isKafkaConnected(KafkaConsumer<String, String> consumer) {
        try {
            consumer.listTopics(Duration.ofSeconds(10));
            return true;
        } catch (Exception e) {
            logger.error("Failed to connect to Kafka", e);
            return false;
        }
    }

    /**
     * Appends an unparseable payload to failed_message.json (created on first use)
     * so it can be inspected or replayed later. Best-effort: failures are logged,
     * never thrown.
     */
    private static void saveRawMessageToFile(String rawMessage) {
        try {
            // Explicit UTF-8: the no-arg getBytes() would use the platform charset.
            Files.write(Paths.get("failed_message.json"),
                    (rawMessage + System.lineSeparator()).getBytes(StandardCharsets.UTF_8),
                    StandardOpenOption.CREATE, StandardOpenOption.APPEND);
            logger.info("Failed JSON message saved to failed_message.json");
        } catch (IOException e) {
            logger.error("Error saving raw message to file", e);
        }
    }
}
