package com.xctech.yace.thread;

import com.xctech.yace.enumeration.KafkaRoleType;
import com.xctech.yace.single.RecivedDataManger;
import com.xctech.yace.util.DirUtil;
import com.xctech.yace.util.KafkaKerberosUtil;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndTimestamp;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;

import java.io.FileInputStream;
import java.io.IOException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;

import static com.xctech.yace.util.KafkaKerberosUtil.getKafkaSaslPlainConfMap;

public class ConsumerThread implements Runnable {

    /** Shared message counter (kept public static for existing callers). */
    public static AtomicInteger count = new AtomicInteger(0);

    private KafkaConsumer<String, String> consumer;

    private ConsumerRecords<String, String> msgs;

    /** Topics this worker consumes; stored but see the subscription note in the constructor. */
    private final String[] topics;

    private Properties props;

    // Timestamp format "yyyyMMdd HH:mm:ss" — only referenced by the removed
    // time-range rollback mode; kept so re-enabling that mode is trivial.
    private SimpleDateFormat sdf;

    /**
     * Builds a consumer worker and loads its configuration from
     * {@code conf/kafkaconsumer.properties}. When SASL/PLAIN settings are
     * available they are overlaid onto the loaded properties; otherwise the
     * SASL mechanism/protocol keys are stripped so the client connects
     * unauthenticated.
     *
     * @param consumer  pre-built Kafka consumer (raw type kept for caller compatibility)
     * @param topicName topics this thread should consume from
     * @param partition target partition (only used by the removed rollback mode; retained
     *                  so the constructor signature is unchanged)
     */
    @SuppressWarnings("unchecked")
    public ConsumerThread(KafkaConsumer consumer, String[] topicName, Integer partition) {
        this.consumer = consumer;
        this.topics = topicName;

        String filePath = DirUtil.getOrgPath() + "conf/kafkaconsumer.properties";
        props = new Properties();
        // try-with-resources: the original leaked the FileInputStream.
        try (FileInputStream in = new FileInputStream(filePath)) {
            props.load(in);
            Map<String, String> map = getKafkaSaslPlainConfMap(props, KafkaRoleType.CONSUMER);
            if (map != null) {
                // SASL/PLAIN configured: overlay the generated security properties.
                for (Map.Entry<String, String> entry : map.entrySet()) {
                    props.setProperty(entry.getKey(), entry.getValue());
                }
            } else {
                // No SASL config: copy everything except the mechanism/protocol keys
                // so the client does not attempt an authenticated connection.
                Properties newProps = new Properties();
                String mechanism = KafkaKerberosUtil.mechanism;
                String protocol = KafkaKerberosUtil.protocol;
                for (String field : props.stringPropertyNames()) {
                    if (!(mechanism.equals(field) || protocol.equals(field))) {
                        newProps.setProperty(field, props.getProperty(field, ""));
                    }
                }
                props = newProps;
            }
        } catch (IOException e) {
            // Best-effort: keep whatever was loaded; matches the original's behavior.
            e.printStackTrace();
        }
        this.sdf = new SimpleDateFormat("yyyyMMdd HH:mm:ss");
        // NOTE(review): nothing here subscribes the consumer to `topics` — the original
        // subscribe call was commented out. Verify that the caller subscribes/assigns,
        // otherwise poll() in run() will never return records.
    }

    /**
     * Poll loop: drains records, prints a short trace per record, and pushes each
     * record into the shared {@link RecivedDataManger} buffer. Runs until the
     * thread is interrupted or an unexpected exception escapes.
     */
    @Override
    public void run() {
        System.out.println("-----------开始消费-------------");
        try {
            // Honor interruption instead of spinning forever in an unbreakable for(;;).
            while (!Thread.currentThread().isInterrupted()) {
                msgs = consumer.poll(1);
                RecivedDataManger<ConsumerRecord<String, String>> instance = RecivedDataManger.getInstance();
                if (null != msgs && msgs.count() > 0) {
                    for (ConsumerRecord<String, String> record : msgs) {
                        System.out.println("=======receive: key = " + record.value());
                        System.out.println("=======receive: time 耗时：" + record.key() + "ms");
                        // Hand the record to the shared buffer for downstream processing.
                        instance.push(record);
                    }
                } else {
                    // Nothing fetched: back off briefly to avoid a busy spin.
                    Thread.sleep(10);
                }
            }
        } catch (InterruptedException e) {
            // Restore the interrupt flag so the owner of this thread can observe it
            // (the original swallowed it via the broad catch below).
            Thread.currentThread().interrupt();
        } catch (Exception e) {
            e.printStackTrace();
        }
        // NOTE(review): consumer.close() was commented out in the original — the
        // consumer appears to be owned by the caller, so it is not closed here; confirm.
    }
}
