package com.xctech.yace.service;

import com.alibaba.fastjson.JSONObject;
import lombok.SneakyThrows;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndTimestamp;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.time.Duration;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.*;

/**
 * Fetches messages from a Kafka topic within a [start, end] timestamp window and
 * prints the "xid" field parsed from each record's JSON value.
 *
 * NOTE(review): class name has a typo ("Cosumer" -> "Consumer"); kept as-is because
 * renaming would break external references to this class.
 */
public class KafkaCosumerByTime {

    static KafkaConsumer<String, String> consumer;

    // Thread-safe, cached formatter (replaces per-call SimpleDateFormat).
    private static final DateTimeFormatter TIME_FORMAT =
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

    /**
     * Converts a "yyyy-MM-dd HH:mm:ss" string (interpreted in the system default
     * zone, matching the original SimpleDateFormat behavior) to epoch milliseconds.
     *
     * @param s timestamp string, e.g. "2022-05-06 00:00:00"
     * @return epoch milliseconds
     * @throws ParseException declared for backward compatibility with callers
     */
    private static Long dateToStamp(String s) throws ParseException {
        return LocalDateTime.parse(s, TIME_FORMAT)
                .atZone(ZoneId.systemDefault())
                .toInstant()
                .toEpochMilli();
    }

    @SneakyThrows
    public static void main(String[] args) {
        String topic = "TRADE_O32-249";
        long fetchStartTime = dateToStamp("2022-05-06 00:00:00");
        long fetchEndTime = dateToStamp("2022-05-07 23:59:59");

        Properties props = new Properties();
        // Fixed: original value had a leading space, which breaks host resolution.
        props.put("bootstrap.servers", "192.168.0.110:9092");
        props.put("group.id", "test");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        consumer = new KafkaConsumer<>(props); // typed, not raw

        try {
            // Fetch and print records within the requested time window.
            getMsgByTime(topic, fetchStartTime, fetchEndTime);
        } finally {
            consumer.close(); // release network resources even if fetching fails
        }
        System.out.println("finish!");
    }

    /**
     * Reads every record in {@code topic} whose timestamp falls in
     * [fetchStartTime, fetchEndTime] and prints its offset and "xid" field.
     *
     * @param topic          topic to read
     * @param fetchStartTime window start, epoch millis (inclusive)
     * @param fetchEndTime   window end, epoch millis (inclusive)
     */
    private static void getMsgByTime(String topic, long fetchStartTime, long fetchEndTime) {
        // Ask the broker for the first offset at/after fetchStartTime in each partition.
        Map<TopicPartition, Long> timestampsToSearch = new HashMap<>();
        for (PartitionInfo par : consumer.partitionsFor(topic)) {
            timestampsToSearch.put(new TopicPartition(topic, par.partition()), fetchStartTime);
        }
        Map<TopicPartition, OffsetAndTimestamp> parMap = consumer.offsetsForTimes(timestampsToSearch);

        // Process one partition at a time.
        for (Map.Entry<TopicPartition, OffsetAndTimestamp> entry : parMap.entrySet()) {
            TopicPartition tp = entry.getKey();
            OffsetAndTimestamp startOffset = entry.getValue();

            if (startOffset == null) {
                // No message at/after fetchStartTime in this partition.
                // Fixed: original fell through and polled the previously assigned
                // partition again instead of skipping.
                continue;
            }
            consumer.assign(Collections.singletonList(tp));
            consumer.seek(tp, startOffset.offset());

            // Snapshot the partition's end offset so we stop once it is drained.
            // Fixed: original while(true) spun forever on empty polls when the
            // partition had no record past fetchEndTime.
            long endOffset = consumer.endOffsets(Collections.singletonList(tp)).get(tp);

            boolean pastEndTime = false;
            while (!pastEndTime && consumer.position(tp) < endOffset) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(500));
                for (ConsumerRecord<String, String> record : records) {
                    if (record.timestamp() > fetchEndTime) {
                        // Per-partition timestamps are non-decreasing for log-append
                        // time; stop instead of printing out-of-window records.
                        pastEndTime = true;
                        break;
                    }
                    JSONObject json = (JSONObject) JSONObject.parse(record.value());
                    Object xid = (json == null) ? null : json.get("xid"); // guard: parse may return null
                    if (xid == null) {
                        System.out.println("key = " + record.key() + ",offset =" + record.offset()
                                + ",time = " + record.timestamp() + "xid没有");
                    } else {
                        System.out.println("key = " + record.key() + ",offset =" + record.offset()
                                + ",time = " + record.timestamp() + "xid =" + xid);
                    }
                }
            }
        }
    }

}
