package cn.getech.data.development.utils.kafka;

import cn.getech.data.intelligence.common.utils.DateUtils;
import com.alibaba.fastjson.JSONObject;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.KafkaAdminClient;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.time.Duration;
import java.util.*;
import java.util.concurrent.TimeUnit;

/**
 * @description: Kafka 消费工具
 * @author: wangzhaowen：kiss
 * @create: 2020/11/26 11:13
 * @version: 2020/11/26
 **/
@Slf4j
public class LocalKafkaUtil {
    private static Properties prop = new Properties();

    static {

        prop.setProperty("auto.offset.reset", "earliest");
        prop.put("auto.commit.offset", false);
        prop.setProperty("session.timeout.ms", "60000");
        prop.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        prop.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    }

    public static List<Object> viewData(String kafkaUrl, String topic, String format, Integer limit) {
        String now = "view_" + DateUtils.format(new Date(), "yyyy_mm_dd_hh_mm_ss");
        prop.setProperty("group.id", now);
        prop.setProperty("bootstrap.servers", kafkaUrl);
        KafkaConsumer consumer = null;
        List<Object> list = Lists.newArrayList();
        int num = 0;//等待次数
        try {
            consumer = new KafkaConsumer<String, String>(prop);
            consumer.subscribe(Collections.singletonList(topic.trim()));
            while (list.size() <= limit && num < 5) {
                ConsumerRecords consumerRecords = consumer.poll(Duration.ofSeconds(1));
                if (!consumerRecords.isEmpty()) {
                    Iterator<ConsumerRecord<String, String>> valueIter = consumerRecords.iterator();
                    while (valueIter.hasNext() && list.size() <= limit) {
                        ConsumerRecord<String, String> record = valueIter.next();
                        String messageData = record.value();
                        try {
                            JSONObject data = JSONObject.parseObject(messageData);
                            list.add(data);
                        } catch (Exception e) {
                            list.add(messageData);
                            log.info("数据格式非json:{}", messageData);
                        }
                    }
                } else {
                    log.info("无数据");
                    num++;
                }
            }
        } catch (Exception e) {
            log.error("kafka数据获取失败:",e);
            list.add("kafka数据获取失败:" + e.getMessage());
        }finally {
            if (consumer != null) {
                consumer.close();
            }
        }
       
        return list;
    }

    public static Set topicList(String kafkaUrl) {
        String now = "view_" + DateUtils.format(new Date(), "yyyy_mm_dd_hh_mm_ss");
        prop.setProperty("group.id", now);
        prop.setProperty("bootstrap.servers", kafkaUrl);
        Set topics = Sets.newHashSet();
        try {
            KafkaConsumer consumer = new KafkaConsumer<String, String>(prop);
            topics = consumer.listTopics((Duration.ofMillis(2000))).keySet();
            consumer.close();
        } catch (Exception e) {
            log.error("获取topic数据异常",e);
        }
        return topics;
    }

    public static void produceData(  List<String> list,String topic, String kafkaUrl){
        prop.setProperty("bootstrap.servers", kafkaUrl);
        prop.put("acks", "all");
        prop.put("retries", 0);
        prop.put("batch.size", 16384);
        prop.put("linger.ms", 1);
        prop.put("buffer.memory", 33554432);
        prop.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        prop.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        Producer<String, String> producer = new KafkaProducer<>(prop);
        for (String data : list) {
            ProducerRecord<String, String> msg = new ProducerRecord<>(topic, data);
            producer.send(msg);
        }
    }

    public static void createTopic(String topicName, Integer partitionNum, Short replicationFactor, String kafkaUrl) {
        AdminClient client = null;
        try {
            if (!topicList(kafkaUrl).contains(topicName)) {//topic不存在就创建
                prop.setProperty("bootstrap.servers", kafkaUrl);
                client = KafkaAdminClient.create(prop);//创建Topic
                NewTopic topic = new NewTopic(topicName,
                        partitionNum == null ? 1 : partitionNum,
                        replicationFactor == null ? 1 : replicationFactor);
                client.createTopics(Collections.singletonList(topic));
            }
        } catch (Exception e) {
            log.warn("topic 创建失败:{}",e.getMessage());
        } finally {
            if (client != null) {
                client.close(20, TimeUnit.SECONDS);
            }
        }
    }
}
