package kafka.devops;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.time.Duration;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.regex.Pattern;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndTimestamp;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;

/**
 * 目的：
 * 去捞取指定时间范围内，匹配对应正则的消息
 * <p>
 * 打包命令：
 * mvn compile assembly:single -DskipTests
 * <p>
 * 运行命令：
 * java -jar kafka-devops-tjs.jar "{\"bootstrap\":\"localhost:9092\",\"endTime\":\"2022-02-08 14:00:00\",\"regx\":\".*(4).*(a6).*\",\"startTime\":\"2022-02-08 10:00:00\",\"topic\":\"student\"}"
 */
public class DoMain {

    /**
     * Timestamp pattern for both parsing CLI times and printing record times.
     * DateTimeFormatter is immutable and thread-safe, so it is cached once
     * (the original built a new SimpleDateFormat on every call).
     */
    private static final DateTimeFormatter TIME_FORMAT =
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

    /**
     * Stop after this many consecutive empty polls. The original commented out
     * its exit (`// break stop;`) and therefore looped forever once the topic
     * had no more data.
     */
    private static final int MAX_EMPTY_POLLS = 3;

    /**
     * Entry point: seeks every partition of the configured topic to startTime,
     * then prints records whose value matches the configured regex, stopping at
     * endTime, at maxRow matches, or when the topic is exhausted.
     */
    public static void main(String[] args) {
        MainParams mainParams = MainParams.initMainParams(args);
        KafkaConsumer<String, String> consumer = getKafkaConsumer(mainParams);

        try {
            // Rewind each partition to the first offset at/after startTime.
            seekOffsetsForTimes(mainParams, consumer);

            System.out.println("\n开始检索目标数据...");
            long endTimestamp = getTimestamps(mainParams.getEndTime());
            // Compile the regex once; Pattern.matches() recompiled it per record.
            Pattern pattern = Pattern.compile(mainParams.getRegx());
            int resNum = 0;
            int emptyPolls = 0;
            stop:
            while (true) {
                // poll(long) is deprecated since Kafka 2.0; Duration overload keeps the 1s timeout.
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(1));
                if (records.isEmpty()) {
                    System.out.println("检索目标数据，没有更多数据了...");
                    // Bug fix: exit after repeated empty polls instead of spinning forever.
                    if (++emptyPolls >= MAX_EMPTY_POLLS) {
                        break stop;
                    }
                    continue;
                }
                emptyPolls = 0;
                for (ConsumerRecord<String, String> record : records) {
                    if (record.timestamp() > endTimestamp) {
                        System.out.println("结束检索目标数据，达到结束时间 = " + getDateStr(record.timestamp()) + ", data = " + record.value());
                        break stop;
                    }
                    String value = record.value();
                    // Tombstone records carry a null value; skip them instead of NPE-ing.
                    if (value == null) {
                        continue;
                    }
                    if (pattern.matcher(value.replaceAll("\\n", "")).matches()) {
                        System.out.println(
                                "time = " + getDateStr(record.timestamp()) + "（" + record.timestamp() + "）" +
                                        ", partition = " + record.partition() + ", offset = " + record.offset() +
                                        ",\n data = " + record.value());

                        // Bug fix: the original's ">" printed maxRow + 1 matches; ">=" stops at maxRow.
                        if (++resNum >= mainParams.getMaxRow()) {
                            System.out.println("结束检索目标数据，达到最大检索行数 maxRow= " + mainParams.getMaxRow());
                            break stop;
                        }
                    }
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            consumer.close();
        }
    }

    /**
     * Assigns all partitions of the topic to the consumer and seeks each one to
     * the earliest offset whose record timestamp is {@code >= startTime}, using
     * {@link KafkaConsumer#offsetsForTimes(Map)}.
     *
     * <p>Partitions whose newest record is older than startTime map to a null
     * {@link OffsetAndTimestamp}; those keep their default position.
     *
     * @throws ParseException        if startTime does not match "yyyy-MM-dd HH:mm:ss"
     * @throws IllegalStateException if the topic has no partitions (e.g. does not exist)
     */
    private static void seekOffsetsForTimes(MainParams mainParams, KafkaConsumer<String, String> consumer) throws ParseException {
        System.out.println("开始获取主题所有分区...");
        List<PartitionInfo> partitionInfos = consumer.partitionsFor(mainParams.getTopic());
        if (partitionInfos == null || partitionInfos.isEmpty()) {
            throw new IllegalStateException("Topic not found or has no partitions: " + mainParams.getTopic());
        }

        // Parse startTime once (the original re-parsed it for every partition)
        // and build each TopicPartition once instead of twice.
        long startTimestamp = getTimestamps(mainParams.getStartTime());
        List<TopicPartition> topicPartitions = new ArrayList<>(partitionInfos.size());
        Map<TopicPartition, Long> timestampsToSearch = new HashMap<>();
        for (PartitionInfo partitionInfo : partitionInfos) {
            TopicPartition tp = new TopicPartition(partitionInfo.topic(), partitionInfo.partition());
            topicPartitions.add(tp);
            timestampsToSearch.put(tp, startTimestamp);
        }
        consumer.assign(topicPartitions);

        // Key call: offsetsForTimes maps each partition to the first offset at/after startTime.
        Map<TopicPartition, OffsetAndTimestamp> map = consumer.offsetsForTimes(timestampsToSearch);
        System.out.println("开始设置各分区起始偏移量...");
        for (Map.Entry<TopicPartition, OffsetAndTimestamp> entry : map.entrySet()) {
            OffsetAndTimestamp offsetTimestamp = entry.getValue();
            // null when startTime is later than the newest indexed record of the partition
            if (offsetTimestamp != null) {
                System.out.println("partition = " + entry.getKey().partition() +
                        ", time = " + getDateStr(offsetTimestamp.timestamp()) +
                        ", offset = " + offsetTimestamp.offset());
                // Position the consumer so the next poll starts at this offset.
                consumer.seek(entry.getKey(), offsetTimestamp.offset());
            }
        }
    }

    /**
     * Builds a String/String consumer for the configured bootstrap servers.
     * Offsets are positioned manually via seek, and this is a read-only
     * diagnostic tool, so auto-commit is disabled to avoid moving the
     * group's committed offsets as a side effect.
     */
    private static KafkaConsumer<String, String> getKafkaConsumer(MainParams mainParams) {
        Properties props = new Properties();
        props.put("bootstrap.servers", mainParams.getBootstrap());
        props.put("group.id", "test_check_error_data");
        props.put("enable.auto.commit", "false");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        return new KafkaConsumer<>(props);
    }

    /** Formats an epoch-millis timestamp as "yyyy-MM-dd HH:mm:ss" in the default time zone. */
    public static String getDateStr(long timestamps) {
        return TIME_FORMAT.format(Instant.ofEpochMilli(timestamps).atZone(ZoneId.systemDefault()));
    }

    /**
     * Parses a "yyyy-MM-dd HH:mm:ss" string (interpreted in the default time zone,
     * matching the original SimpleDateFormat behavior) into epoch milliseconds.
     *
     * @throws ParseException declared for source compatibility with existing callers;
     *                        java.time actually throws unchecked DateTimeParseException
     */
    public static long getTimestamps(String dateStr) throws ParseException {
        return LocalDateTime.parse(dateStr, TIME_FORMAT)
                .atZone(ZoneId.systemDefault())
                .toInstant()
                .toEpochMilli();
    }
}
