package com.dky.satell.kafka.acquisition;

import com.alibaba.fastjson.JSONObject;
import com.dky.satell.entity.MessageConfig;
import com.dky.satell.kafka.model.MessageWrapper;
import com.dky.satell.util.ApplicationContextProvider;
import com.dky.satell.kafka.producer.KafkaProducer;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;

@Slf4j
public class Consumer extends Thread {

    private KafkaConsumer<Object,Object> consumer;
    private List<String> topicList;
    private MessageConfig messageConfig;

    public Consumer(MessageConfig messageConfig){
        this.messageConfig = messageConfig;
        Properties properties=new Properties();
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,messageConfig.getNamesrvAddr());
        properties.put(ConsumerConfig.CLIENT_ID_CONFIG,messageConfig.getGroupId());
        properties.put(ConsumerConfig.GROUP_ID_CONFIG,messageConfig.getGroupId());
        properties.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG,"30000");
        properties.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG,"1000"); //自动提交(批量确认)
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class.getName());
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        //一个新的group的消费者去消费一个topic
        properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG,"earliest"); //这个属性. 它能够消费昨天发布的数据
        consumer=new KafkaConsumer<>(properties);
        this.topicList = Arrays.asList(messageConfig.getTopic().split(","));
    }

    @Override public void run() {
        log.info("Kafka开始监听, 队列名称:[{}], 地址:[{}], topic:[{}], groupId:[{}]", messageConfig.getName(), messageConfig.getNamesrvAddr(), messageConfig.getTopic(), messageConfig.getGroupId());
        KafkaProducer kafkaProducer = ApplicationContextProvider.getBean(KafkaProducer.class);
        consumer.subscribe(topicList);
        try {
            while (true) {
                ConsumerRecords<Object,Object> consumerRecords = consumer.poll(Duration.ofSeconds(1));
                consumerRecords.forEach(record ->{
                    log.info("Kafka采集, topic:[{}], key:[{}] message:[{}]", record.topic(), record.key(), record.value());
                    MessageConfig kafkaConfig = JSONObject.parseObject(JSONObject.toJSONString(messageConfig), MessageConfig.class);
                    kafkaConfig.setTopic(record.topic());
                    MessageWrapper wrapper = new MessageWrapper();
                    wrapper.setMessageConfig(kafkaConfig);
                    wrapper.setPayload(record.value());
                    log.info("消息队列加载至kafka[{}]",wrapper);
                    kafkaProducer.sendMessage(wrapper);
                });
            }
        } catch (Exception e) {
            log.error("kafka消费者线程error", e);
        }
    }

    /**
     * 取消订阅 - 暂时存在线程不安全问题
     */
    public void unsubscribe() {
        log.info("Kafka队列名称[{}]停止监听", messageConfig.getName());
        consumer.unsubscribe();
    }
//
//    public static void main(String[] args) {
//        Consumer consumer = new Consumer("test_partition");
//        consumer.
//
//    }
}

