package com.kafka_java;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * Standalone Kafka consumer that reads string messages from partition 0 of the
 * {@code test} topic, pretty-prints each payload as JSON, and appends any
 * payload that fails to parse to {@code failed_message.json}. Offsets are
 * committed manually ({@code enable.auto.commit=false}) only after a polled
 * batch has been fully processed, so a crash mid-batch re-delivers it.
 */
public class Consumer {

    // Jackson's ObjectMapper is thread-safe and expensive to construct;
    // share a single instance instead of building one per run.
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("bootstrap.servers", "8.130.34.5:32000");
        props.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.setProperty("enable.auto.commit", "false"); // commit offsets manually after processing
        props.setProperty("auto.offset.reset", "latest");
        props.setProperty("group.id", "test");

        // Shutdown flag flipped by the JVM shutdown hook so the poll loop can
        // exit cleanly and reach the finally block that closes the consumer.
        // (The original loop condition was never falsified, so consumer.close()
        // was unreachable on Ctrl-C / SIGTERM.)
        AtomicBoolean running = new AtomicBoolean(true);
        Thread mainThread = Thread.currentThread();
        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            running.set(false);
            try {
                // Give the poll loop (500 ms poll timeout) time to finish its
                // current iteration and close the consumer before the JVM halts.
                mainThread.join(5_000);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }));

        KafkaConsumer<String, String> consumer = null;
        try {
            consumer = new KafkaConsumer<>(props);
            TopicPartition partition0 = new TopicPartition("test", 0);
            consumer.assign(Collections.singletonList(partition0));
            // Always replay this partition from the beginning; with manual
            // assignment the group's committed offset is deliberately ignored.
            consumer.seek(partition0, 0);

            if (!isKafkaConnected(consumer)) {
                throw new RuntimeException("Failed to connect to Kafka. Please check your connection settings.");
            }

            while (running.get()) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(500));
                if (records.isEmpty()) {
                    continue; // nothing fetched this round; poll again
                }
                for (ConsumerRecord<String, String> record : records) {
                    System.out.printf("Offset: %d, Key: %s%n", record.offset(), record.key());
                    try {
                        // Parse the payload as JSON and pretty-print it.
                        JsonNode jsonNode = OBJECT_MAPPER.readTree(record.value());
                        String formattedJson = OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(jsonNode);
                        System.out.println("Formatted JSON message:\n" + formattedJson);
                    } catch (IOException e) {
                        // Keep consuming: log the parse failure and persist the
                        // raw payload for later inspection.
                        System.err.println("Error parsing JSON message: " + e.getMessage());
                        saveRawMessageToFile(record.value());
                    }
                }
                consumer.commitSync(); // commit only after the whole batch was processed
            }
        } catch (Exception e) {
            System.err.println("Kafka connection error: " + e.getMessage());
            e.printStackTrace();
        } finally {
            if (consumer != null) {
                consumer.close();
                System.out.println("Kafka consumer closed.");
            }
        }
    }

    /**
     * Probes broker connectivity by listing topics with a 10-second timeout.
     *
     * @param consumer an already-constructed consumer to probe with
     * @return {@code true} if the metadata request succeeded, {@code false}
     *         (after logging the cause) otherwise
     */
    private static boolean isKafkaConnected(KafkaConsumer<String, String> consumer) {
        try {
            consumer.listTopics(Duration.ofSeconds(10));
            return true;
        } catch (Exception e) {
            System.err.println("Failed to connect to Kafka: " + e.getMessage());
            return false;
        }
    }

    /**
     * Appends a raw (unparseable) message plus a line separator to
     * {@code failed_message.json} in the working directory, creating the file
     * if needed. Write failures are logged, not rethrown, so consumption
     * continues.
     *
     * @param rawMessage the payload that failed JSON parsing
     */
    private static void saveRawMessageToFile(String rawMessage) {
        try {
            // Encode explicitly as UTF-8; the original getBytes() used the
            // platform-default charset, which varies by host.
            Files.write(Paths.get("failed_message.json"),
                    (rawMessage + System.lineSeparator()).getBytes(StandardCharsets.UTF_8),
                    StandardOpenOption.CREATE, StandardOpenOption.APPEND);
            System.out.println("Failed JSON message saved to failed_message.json");
        } catch (IOException e) {
            System.err.println("Error saving raw message to file: " + e.getMessage());
        }
    }
}
