package com.zmn.oms.task.kafka.conf;

import com.alibaba.fastjson.JSON;
import com.zmn.oms.task.kafka.listener.DtsKafkaListener;
import com.zmn.oms.task.kafka.process.RecordProcessService;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndTimestamp;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.condition.ConditionalOnExpression;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import javax.annotation.PreDestroy;
import java.util.*;
import java.util.concurrent.Executor;

/**
 * Auto-configuration that subscribes to order data-change events delivered by
 * Aliyun DTS over Kafka.
 *
 * @author lujia
 * @date 2018/12/05 14:08
 */
@Slf4j
@Configuration
@EnableConfigurationProperties(DtsKafkaConsumerProperties.class)
@ConditionalOnExpression("'${elastic.sync.type}'.equalsIgnoreCase('kafka')")
public class DtsKafkaConsumerAutoConfiguration {

    // Deserializers must match the KafkaConsumer<String, byte[]> generic
    // signature used below: String keys, raw byte[] values.
    private static final String KEY_DESERIALIZER = "org.apache.kafka.common.serialization.StringDeserializer";
    private static final String VALUE_DESERIALIZER = "org.apache.kafka.common.serialization.ByteArrayDeserializer";
    // DTS SASL/PLAIN convention: the login username is "<user>-<sid>".
    private static final String JAAS_TEMPLATE = "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"%s-%s\" password=\"%s\";";
    // How far back (in milliseconds) to rewind on startup: 10 minutes.
    private static final long REWIND_MILLIS = 10 * 60 * 1000L;

    private final Properties props;
    private final DtsKafkaConsumerProperties properties;

    private KafkaConsumer<String, byte[]> kafkaConsumer;

    /**
     * Builds the Kafka consumer configuration from the bound DTS properties.
     *
     * @param properties connection and credential settings bound from application config
     */
    public DtsKafkaConsumerAutoConfiguration(DtsKafkaConsumerProperties properties) {
        this.properties = properties;
        this.props = new Properties();

        log.info("#oms#dts#kafka conf : {}", JSON.toJSONString(properties));

        String brokers = properties.getDtsConnectIp() + ":" + properties.getDtsConnectPort();
        String jaasCfg = String.format(JAAS_TEMPLATE, properties.getUsername(), properties.getSid(), properties.getPassword());

        props.put("bootstrap.servers", brokers);
        props.put("session.timeout.ms", "30000");
        // Fix: the key deserializer must be the String deserializer to match
        // KafkaConsumer<String, byte[]>; the original wired the byte-array
        // deserializer for both key and value, so any record.key() access
        // would fail with a ClassCastException. The key.serializer /
        // value.serializer entries are dropped: they are producer-only
        // configs that a consumer never reads.
        props.put("key.deserializer", KEY_DESERIALIZER);
        props.put("value.deserializer", VALUE_DESERIALIZER);
        props.put("security.protocol", "SASL_PLAINTEXT");
        props.put("sasl.mechanism", "PLAIN");
        props.put("sasl.jaas.config", jaasCfg);
        // Offsets are committed by the listener, not auto-committed.
        props.put("enable.auto.commit", "false");
        // Fix: the group id was the hard-coded placeholder "xxx". Per the DTS
        // convention already reflected in the JAAS username template, the sid
        // is the consumer group id. NOTE(review): confirm this matches the
        // group configured in the DTS console.
        props.put("group.id", properties.getSid());
    }

    /**
     * Safety net on context shutdown. The bean's {@code destroyMethod} also
     * closes the consumer first; the null-out prevents this hook from holding
     * a stale reference. NOTE(review): the Kafka client guards close() with an
     * internal "closed" flag, so a repeated close is expected to be a no-op —
     * verify for the client version in use.
     */
    @PreDestroy
    public void close() {
        if (this.kafkaConsumer != null) {
            this.kafkaConsumer.close();
            this.kafkaConsumer = null;
        }
    }

    /**
     * Creates the DTS Kafka consumer, manually assigned to every partition of
     * the configured topic and rewound to roughly ten minutes ago.
     *
     * @return the configured, assigned and sought consumer bean
     */
    @Bean(name = "dtsKafkaConsumer", destroyMethod = "close")
    public KafkaConsumer<String, byte[]> consumer() {
        this.kafkaConsumer = new KafkaConsumer<>(props);
        // Fix: offsetsForTimes() expects epoch MILLISECONDS. The original
        // passed seconds, which resolves to January 1970 and silently seeks
        // to the earliest retained offset instead of 10 minutes back.
        long startTimestamp = System.currentTimeMillis() - REWIND_MILLIS;
        assignOffsetToConsumer(kafkaConsumer, properties.getTopic(), startTimestamp);
        return this.kafkaConsumer;
    }

    /**
     * Creates the listener that drains the consumer and hands records to the
     * processing service on the shared task pool. Started via initMethod.
     */
    @Bean(name = "dtsKafkaListener", initMethod = "start")
    public DtsKafkaListener listener(@Autowired RecordProcessService recordProcessService,
                                     @Autowired KafkaConsumer kafkaConsumer,
                                     @Autowired @Qualifier("taskAsyncPool") Executor executor) {
        return new DtsKafkaListener(recordProcessService, kafkaConsumer, executor);
    }

    /**
     * Assigns the consumer to every partition of {@code topic} and seeks each
     * partition to the first offset whose timestamp is at or after
     * {@code startTime}.
     *
     * @param consumer  consumer to assign (uses manual assignment, not subscribe)
     * @param topic     DTS topic name
     * @param startTime start position as epoch milliseconds
     * @return true if every partition was assigned and sought successfully
     */
    public boolean assignOffsetToConsumer(KafkaConsumer<String, byte[]> consumer, String topic, long startTime) {

        final List<PartitionInfo> partitionInfoList = consumer.partitionsFor(topic);
        // Fix: partitionsFor can return null when the topic does not exist;
        // the original would NPE on isEmpty().
        if (partitionInfoList == null || partitionInfoList.isEmpty()) {
            log.warn("#oms#dts#kafka topic:{} no partition", topic);
            return false;
        }
        log.info("#oms#dts#kafka Number of Partitions : {}", partitionInfoList.size());

        final List<TopicPartition> topicPartitions = new ArrayList<>(partitionInfoList.size());
        for (PartitionInfo pInfo : partitionInfoList) {
            topicPartitions.add(new TopicPartition(topic, pInfo.partition()));
        }
        consumer.assign(topicPartitions);
        for (TopicPartition partition : topicPartitions) {
            OffsetAndTimestamp offsetTs = fetchOffsetByTime(consumer, partition, startTime);
            if (offsetTs == null) {
                log.warn("No Offset Found for partition : {}", partition.partition());
                return false;
            }
            log.info("Offset Found for partition : {} {}", offsetTs.offset(), partition.partition());
            consumer.seek(partition, offsetTs.offset());
        }
        return true;
    }

    /**
     * Looks up the offset of the first record at or after {@code startTime}
     * for a single partition.
     *
     * @param consumer  consumer to query through
     * @param partition partition to look up
     * @param startTime epoch milliseconds
     * @return the matching offset, or null when none could be resolved
     */
    public static OffsetAndTimestamp fetchOffsetByTime(KafkaConsumer<String, byte[]> consumer, TopicPartition partition, long startTime) {

        final Map<TopicPartition, Long> query = Collections.singletonMap(partition, startTime);
        final Map<TopicPartition, OffsetAndTimestamp> offsetResult = consumer.offsetsForTimes(query);
        if (offsetResult == null || offsetResult.isEmpty()) {
            log.error("#oms#dts#kafka No Offset to Fetch");
            return null;
        }

        for (Map.Entry<TopicPartition, OffsetAndTimestamp> entry : offsetResult.entrySet()) {
            log.info("#oms#dts#kafka key:{} value:{}", entry.getKey(), entry.getValue());
        }

        final OffsetAndTimestamp offsetTimestamp = offsetResult.get(partition);
        if (offsetTimestamp == null) {
            log.error("#oms#dts#kafka No Offset Found for partition :{}", partition.partition());
        }

        return offsetTimestamp;
    }
}

