package dev_ops.tools.kafka;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.Properties;
import java.util.Random;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;
import org.dev_common.file.FileUtils;
import org.dev_common.func.Functions;

/**
 * TODO: from a topic's specified time range, look up the partition+offset, then query the
 * Kafka events within that time range.
 * 1) Check whether events forwarded by Flume arrived; analyze arrival times.
 * 2) Add to Spark/Storm for message-latency quality analysis.
 */
public class CheckTimestamp {
    // Shared timestamp formatter. NOTE(review): SimpleDateFormat is not thread-safe;
    // acceptable here only because this tool is single-threaded.
    public static final SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss,SSS");

    /**
     * Manually assigns one hard-coded topic-partition, seeks to a hard-coded offset, and scans
     * records whose timestamp is newer than a hard-coded start time. Matching "auth.login"
     * events are printed and appended to a file; a random sample of other records is printed
     * for context.
     *
     * @param args unused
     * @throws ParseException if the hard-coded start-time string cannot be parsed
     */
    public static void main(String[] args) throws ParseException {
        long time = format.parse("2017-12-05 08:24:00,000").getTime();
        Properties props = newConsumer_envTest();
        KafkaConsumer<String, String> consumer = null;
        try {
            consumer = new KafkaConsumer<String, String>(props);

            // Manual assignment (no consumer-group subscribe) so we can seek to an exact offset.
            // NOTE(review): consumer.offsetsForTimes(...) could derive this offset from `time`
            // instead of hard-coding it — TODO per the class comment.
            TopicPartition tp = new TopicPartition("zhangxin-server", 0);
            consumer.assign(Arrays.asList(tp));
            consumer.seek(tp, 13936335L); // previously probed offsets: 12451, 11037408, 13982280
            System.out.println(consumer.metrics());

            Random rand = new Random();
            while (true) {
                try {
                    Thread.sleep(100); // throttle polling
                } catch (InterruptedException e) {
                    // Restore the interrupt flag and stop instead of swallowing the interrupt;
                    // falling through to `finally` closes the consumer cleanly.
                    Thread.currentThread().interrupt();
                    break;
                }
                ConsumerRecords<String, String> records = consumer.poll(10);
                System.out.println(records.count());
                for (ConsumerRecord<String, String> record : records) {
                    if (record.timestamp() > time && record.key().startsWith("cm")) {
                        String cnt = String.format(">>>offset = %s, key = %s, ts:%s, value:%s\n",
                                record.offset(), record.key(),
                                format.format(new Date(record.timestamp())), record.value());
                        System.out.println(record.offset());
                        if (record.value().contains("auth.login")) {
                            // print, NOT printf: `cnt` embeds the record value, and any '%' in it
                            // would be misread as a format specifier and throw
                            // UnknownFormatConversionException.
                            System.out.print(cnt);
                            FileUtils.appendFile(cnt, "origin");
                        }
                        if (record.timestamp() > 0) {
                            Functions.sleep(1_00); // slow down output for readability
                        }
                    } else if (rand.nextInt(records.count()) == 10) {
                        // Randomly sample a few non-matching records for context.
                        System.out.printf("offset = %d, key = %s, ts:%s\n", record.offset(), record.key(),
                                format.format(new Date(record.timestamp())));
                    }
                }
            }

        } finally {
            if (null != consumer) {
                consumer.close();
            }
        }
    }

    /**
     * Consumer properties for the test environment: common settings plus a hard-coded broker
     * address. No group.id is set — this tool uses manual partition assignment.
     */
    public static Properties newConsumer_envTest() {
        Properties props = newCommonConsumer();
        props.put("bootstrap.servers", "10.135.28.6:9091");
        return props;
    }

    /**
     * Common consumer settings: 30s session timeout and String deserializers for both key and
     * value. Everything else is left at the Kafka client defaults.
     */
    public static Properties newCommonConsumer() {
        Properties props = new Properties();
        props.put("session.timeout.ms", "30000");
        props.put("key.deserializer", KafkaUtils.String_deser);
        props.put("value.deserializer", KafkaUtils.String_deser);
        return props;
    }
}
